[ 602.079385] env[68437]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68437) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 602.079725] env[68437]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68437) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 602.079850] env[68437]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68437) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 602.080165] env[68437]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 602.177177] env[68437]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68437) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 602.186552] env[68437]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=68437) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 602.230155] env[68437]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 602.789097] env[68437]: INFO nova.virt.driver [None req-16431b26-e642-4405-8bb4-8cf6a09ca8fd None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 602.859606] env[68437]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 602.859769] env[68437]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 602.859874] env[68437]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68437) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 605.729335] env[68437]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-fb8c341e-83d3-4802-8fc0-6501b40a5ff7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.746180] env[68437]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68437) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 605.746346] env[68437]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-8e6712a5-bcd3-4baf-bd59-15f619277cb8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.777123] env[68437]: INFO oslo_vmware.api [-] Successfully established new session; session ID is db63f.
[ 605.778288] env[68437]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.917s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 605.778288] env[68437]: INFO nova.virt.vmwareapi.driver [None req-16431b26-e642-4405-8bb4-8cf6a09ca8fd None None] VMware vCenter version: 7.0.3
[ 605.782055] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f75c840-b009-4ae6-b64d-e14b9af8487d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.798662] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564a8aa9-e6b4-49d9-b952-e83fd38f4c3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.804645] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf16ea8b-0182-4d6d-bf5e-555b1f198418 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.811118] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460be773-ddc0-47ef-9c17-c59fac021a4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.824008] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad5bd73-e24b-44e6-a189-5046824b43c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.829721] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd3f188-d27b-4b03-90da-9aad1ca6ca68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.859980] env[68437]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-14481b22-8395-49e1-b6fe-9704f6779bdd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 605.864985] env[68437]: DEBUG nova.virt.vmwareapi.driver [None req-16431b26-e642-4405-8bb4-8cf6a09ca8fd None None] Extension org.openstack.compute already exists. {{(pid=68437) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 605.867655] env[68437]: INFO nova.compute.provider_config [None req-16431b26-e642-4405-8bb4-8cf6a09ca8fd None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 606.371255] env[68437]: DEBUG nova.context [None req-16431b26-e642-4405-8bb4-8cf6a09ca8fd None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),9d5cbcf9-2aad-42ee-ac20-dd167d26205c(cell1) {{(pid=68437) load_cells /opt/stack/nova/nova/context.py:464}}
[ 606.373466] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 606.373698] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 606.374513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 606.374945] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Acquiring lock "9d5cbcf9-2aad-42ee-ac20-dd167d26205c" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 606.375148] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Lock "9d5cbcf9-2aad-42ee-ac20-dd167d26205c" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 606.376151] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Lock "9d5cbcf9-2aad-42ee-ac20-dd167d26205c" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 606.396534] env[68437]: INFO dbcounter [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Registered counter for database nova_cell0
[ 606.405465] env[68437]: INFO dbcounter [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Registered counter for database nova_cell1
[ 606.844420] env[68437]: DEBUG oslo_db.sqlalchemy.engines [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68437) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 606.845150] env[68437]: DEBUG oslo_db.sqlalchemy.engines [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68437) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 606.849935] env[68437]: ERROR nova.db.main.api [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 606.849935] env[68437]: result = function(*args, **kwargs)
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 606.849935] env[68437]: return func(*args, **kwargs)
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 606.849935] env[68437]: result = fn(*args, **kwargs)
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 606.849935] env[68437]: return f(*args, **kwargs)
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 606.849935] env[68437]: return db.service_get_minimum_version(context, binaries)
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 606.849935] env[68437]: _check_db_access()
[ 606.849935] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 606.849935] env[68437]: stacktrace = ''.join(traceback.format_stack())
[ 606.849935] env[68437]:
[ 606.850787] env[68437]: ERROR nova.db.main.api [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 606.850787] env[68437]: result = function(*args, **kwargs)
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 606.850787] env[68437]: return func(*args, **kwargs)
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 606.850787] env[68437]: result = fn(*args, **kwargs)
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 606.850787] env[68437]: return f(*args, **kwargs)
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 606.850787] env[68437]: return db.service_get_minimum_version(context, binaries)
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 606.850787] env[68437]: _check_db_access()
[ 606.850787] env[68437]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 606.850787] env[68437]: stacktrace = ''.join(traceback.format_stack())
[ 606.850787] env[68437]:
[ 606.851210] env[68437]: WARNING nova.objects.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Failed to get minimum service version for cell 9d5cbcf9-2aad-42ee-ac20-dd167d26205c
[ 606.851360] env[68437]: WARNING nova.objects.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 606.851842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Acquiring lock "singleton_lock" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 606.852018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Acquired lock "singleton_lock" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
606.852292] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Releasing lock "singleton_lock" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.852633] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Full set of CONF: {{(pid=68437) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 606.852781] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ******************************************************************************** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 606.852925] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] Configuration options gathered from: {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 606.853050] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 606.853260] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 606.853382] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ================================================================================ {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 606.853589] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] allow_resize_to_same_host = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.853762] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] arq_binding_timeout = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.853908] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] backdoor_port = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854056] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] backdoor_socket = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854230] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] block_device_allocate_retries = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854394] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] block_device_allocate_retries_interval = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854568] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cert = self.pem {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854741] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.854913] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute_monitors = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.855094] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] config_dir = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.855497] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] config_drive_format = iso9660 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.855644] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.855824] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] config_source = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856023] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] console_host = devstack {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856189] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] control_exchange = nova {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856353] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cpu_allocation_ratio = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856516] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] daemon = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856684] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] debug = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.856843] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_access_ip_network_name = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857014] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_availability_zone = nova {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857179] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_ephemeral_format = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857335] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_green_pool_size = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857576] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857738] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] default_schedule_zone = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.857894] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] disk_allocation_ratio = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858066] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] enable_new_services = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858246] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] enabled_apis = ['osapi_compute'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858407] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] enabled_ssl_apis = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858566] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] flat_injected = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858724] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] force_config_drive = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.858880] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] force_raw_images = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859055] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] graceful_shutdown_timeout = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859220] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] heal_instance_info_cache_interval = -1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859444] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] host = cpu-1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859623] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859788] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.859947] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860180] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860347] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_build_timeout = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860505] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_delete_interval = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860670] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_format = [instance: %(uuid)s] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860834] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_name_template = instance-%08x {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.860993] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_usage_audit = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.861176] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_usage_audit_period = month {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.861341] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.861503] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.861705] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] internal_service_availability_zone = internal {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.861868] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] key = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862036] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] live_migration_retry_count = 30 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862209] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_color = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862372] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_config_append = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862540] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862750] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_dir = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.862941] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863088] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_options = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863257] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_rotate_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863430] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_rotate_interval_type = days {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863598] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] log_rotation_type = none {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863731] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.863875] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864085] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864268] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864400] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864564] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] long_rpc_timeout = 1800 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864729] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_concurrent_builds = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.864887] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_concurrent_live_migrations = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865058] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_concurrent_snapshots = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865225] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_local_block_devices = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865383] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_logfile_count = 30 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865541] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] max_logfile_size_mb = 200 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865703] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] maximum_instance_delete_attempts = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.865869] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metadata_listen = 0.0.0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866049] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metadata_listen_port = 8775 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866223] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metadata_workers = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866385] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] migrate_max_retries = -1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866553] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] mkisofs_cmd = genisoimage {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866765] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.866899] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] my_ip = 10.180.1.21 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867117] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867284] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] network_allocate_retries = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867459] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867627] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867788] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] osapi_compute_listen_port = 8774 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.867957] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] osapi_compute_unique_server_name_scope = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] osapi_compute_workers = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868300] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] password_length = 12 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868457] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] periodic_enable = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868614] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] periodic_fuzzy_delay = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868785] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] pointer_model = usbtablet {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.868949] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] preallocate_images = none {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869121] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] publish_errors = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869254] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] pybasedir = /opt/stack/nova {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869408] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ram_allocation_ratio = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869567] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rate_limit_burst = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869737] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rate_limit_except_level = CRITICAL {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.869894] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rate_limit_interval = 0 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870063] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reboot_timeout = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870225] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reclaim_instance_interval = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870379] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] record = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870546] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reimage_timeout_per_gb = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870711] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] report_interval = 120 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.870872] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rescue_timeout = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871043] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reserved_host_cpus = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871209] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reserved_host_disk_mb = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871370] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reserved_host_memory_mb = 512 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871551] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] reserved_huge_pages = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871746] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] resize_confirm_window = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.871918] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] resize_fs_using_block_device = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872097] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] resume_guests_state_on_host_boot = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872270] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872432] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] rpc_response_timeout = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872593] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] run_external_periodic_tasks = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872794] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] running_deleted_instance_action = reap {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.872961] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873138] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] running_deleted_instance_timeout = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873301] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler_instance_sync_interval = 120 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873466] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_down_time = 720 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873636] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] servicegroup_driver = db {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873794] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] shell_completion = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.873982] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] shelved_offload_time = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.874166] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] shelved_poll_interval = 3600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.874335] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] shutdown_timeout = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.874496] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] source_is_ipv6 = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.874655] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ssl_only = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.874924] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875112] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] sync_power_state_interval = 600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875281] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] sync_power_state_pool_size = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875454] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] syslog_log_facility = LOG_USER {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875613] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] tempdir = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875777] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] timeout_nbd = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.875945] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] transport_url = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876125] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] update_resources_interval = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876286] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] use_cow_images = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876448] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] use_journal = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876608] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] use_json = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876767] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] use_rootwrap_daemon = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.876924] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] 
use_stderr = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877093] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] use_syslog = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877254] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vcpu_pin_set = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877422] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plugging_is_fatal = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877591] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plugging_timeout = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877760] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] virt_mkfs = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.877921] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] volume_usage_poll_interval = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.878091] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] watch_log_file = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.878263] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] web = /usr/share/spice-html5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 606.878448] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.878615] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.878782] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.878949] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_concurrency.disable_process_locking = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.879256] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.879445] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.879616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.879790] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.879958] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.880138] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.880323] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.auth_strategy = keystone {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.880490] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.compute_link_prefix = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.880706] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.880897] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.dhcp_domain = novalocal {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881080] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.enable_instance_password = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881248] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.glance_link_prefix = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881414] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881813] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.instance_list_per_project_cells = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.881956] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.list_records_by_skipping_down_cells = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.882158] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.local_metadata_per_cell = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.882343] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.max_limit = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.882519] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.metadata_cache_expiration = 15 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.882719] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.neutron_default_tenant_id = default {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.882910] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.response_validation = warn {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883100] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.use_neutron_default_nets = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883277] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883442] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883609] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883786] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.883958] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_dynamic_targets = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.884139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_jsonfile_path = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.884320] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.884518] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.backend = dogpile.cache.memcached {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.884689] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.backend_argument = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.884850] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.backend_expiration_time = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885029] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.config_prefix = cache.oslo {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885204] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.dead_timeout = 60.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885368] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.debug_cache_backend = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885528] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.enable_retry_client = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885690] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.enable_socket_keepalive = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.885856] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.enabled = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886025] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.enforce_fips_mode = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886193] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.expiration_time = 600 
{{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886354] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.hashclient_retry_attempts = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886519] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886682] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_dead_retry = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.886840] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_password = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887006] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887176] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887339] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_pool_maxsize = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887502] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887666] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_sasl_enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.887846] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888025] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888185] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.memcache_username = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888350] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.proxies = [] {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888511] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_db = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888671] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_password = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.888916] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889131] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889310] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_server = localhost:6379 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889478] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_socket_timeout = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889639] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.redis_username = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889804] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.retry_attempts = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.889969] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.retry_delay = 0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.890147] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.socket_keepalive_count = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.890311] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.socket_keepalive_idle = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.890472] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.socket_keepalive_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.890629] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.tls_allowed_ciphers = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.890830] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.tls_cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891014] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.tls_certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891186] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.tls_enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891344] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cache.tls_keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891526] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891723] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.auth_type = password {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.891912] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892071] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892237] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892401] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892563] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.cross_az_attach = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892748] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.debug = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.892912] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.endpoint_template = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893088] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.http_retries = 3 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893253] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893411] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893581] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.os_region_name = RegionOne {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893746] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.893903] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cinder.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894086] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894285] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.cpu_dedicated_set = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894450] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.cpu_shared_set = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.image_type_exclude_list = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894787] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.894946] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895121] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895284] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895453] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895614] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.resource_provider_association_refresh = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895774] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.895932] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.shutdown_retry_interval = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896127] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896308] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] conductor.workers = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896488] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] console.allowed_origins = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896651] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] console.ssl_ciphers = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896826] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] console.ssl_minimum_version = default {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.896996] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] consoleauth.enforce_session_timeout = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.897183] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] consoleauth.token_ttl = 600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.897359] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.897519] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.certfile = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.897682] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.897841] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898007] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898174] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898338] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898496] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898656] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898816] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.898975] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899150] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899313] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899485] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.service_type = accelerator {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899647] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899809] env[68437]: DEBUG oslo_service.backend.eventlet.service 
[None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.899968] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900320] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900482] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] cyborg.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900654] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.asyncio_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900816] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.asyncio_slave_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.900988] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.backend = sqlalchemy {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.901171] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.901336] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.connection_debug = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.901505] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.connection_parameters = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.901703] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.connection_recycle_time = 3600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.901872] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.connection_trace = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902048] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.db_inc_retry_interval = 
True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902216] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.db_max_retries = 20 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902377] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.db_max_retry_interval = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902562] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.db_retry_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902774] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.max_overflow = 50 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.902945] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.max_pool_size = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903123] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.max_retries = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903297] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903458] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.mysql_wsrep_sync_wait = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903621] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.pool_timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903784] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.retry_interval = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.903940] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.slave_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.904115] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.sqlite_synchronous = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.904293] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] database.use_db_reconnect = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
606.904443] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.asyncio_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.904601] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.asyncio_slave_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.904772] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.backend = sqlalchemy {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.904938] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905116] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.connection_debug = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905287] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.connection_parameters = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905449] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.connection_recycle_time = 3600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905609] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.connection_trace = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905781] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.db_inc_retry_interval = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.905942] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.db_max_retries = 20 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906116] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.db_max_retry_interval = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906316] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.db_retry_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906484] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.max_overflow = 50 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906645] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.max_pool_size = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906808] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.max_retries = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.906977] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907153] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907312] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.pool_timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907474] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.retry_interval = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907631] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.slave_connection = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907793] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] api_database.sqlite_synchronous = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.907969] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] devices.enabled_mdev_types = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908160] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908337] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908491] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ephemeral_storage_encryption.enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908651] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908825] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.api_servers = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.908989] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909166] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909327] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909487] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909640] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909800] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.debug = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.909963] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.default_trusted_certificate_ids = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910134] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.enable_certificate_validation = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910296] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.enable_rbd_download = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910454] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910780] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.910934] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.max_version = None {{(pid=68437) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911100] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911264] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.num_retries = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911432] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.rbd_ceph_conf = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911624] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.rbd_connect_timeout = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911812] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.rbd_pool = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.911984] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.rbd_user = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.912163] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.912320] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.912481] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.912717] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.service_type = image {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.912904] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913080] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913244] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913400] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913581] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913745] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.verify_glance_signatures = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.913902] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] glance.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914081] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] guestfs.debug = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914269] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914435] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.auth_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914594] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914754] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.914916] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915088] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915252] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915412] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915573] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.insecure = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915732] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.915898] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.916165] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.916353] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.916520] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.916684] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.916855] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.service_type = shared-file-system {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917033] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.share_apply_policy_timeout = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917207] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917367] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917527] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917685] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.917866] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.918036] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] manila.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.918234] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] mks.enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.918603] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.918796] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.manager_interval = 2400 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919036] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.precache_concurrency = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919244] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.remove_unused_base_images = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919419] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919587] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919768] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] image_cache.subdirectory_name = _base {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.919942] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.api_max_retries = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920122] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.api_retry_interval = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920284] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920443] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.auth_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920601] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920759] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.920921] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921097] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.conductor_group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921260] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921420] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921602] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921776] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.921944] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.922219] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.922410] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.922585] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.peer_list = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.922749] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.922911] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.retriable_status_codes = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923087] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.serial_console_state_timeout = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923254] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923426] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.service_type = baremetal {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923587] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.shard = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923755] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.923915] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924086] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924248] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924435] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924593] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ironic.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924776] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.924950] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] key_manager.fixed_key = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925157] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925322] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.barbican_api_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925482] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.barbican_endpoint = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925653] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.barbican_endpoint_type = public {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925818] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.barbican_region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.925977] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926152] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926316] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926478] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926638] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926802] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.number_of_retries = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.926963] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.retry_delay = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.send_service_user_token = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927303] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927461] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927622] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.verify_ssl = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927783] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican.verify_ssl_path = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.927950] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928124] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.auth_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928287] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928445] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928610] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928772] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.928932] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929114] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929277] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] barbican_service_user.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929446] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.approle_role_id = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929604] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.approle_secret_id = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929777] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.kv_mountpoint = secret {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.929939] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.kv_path = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.930116] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.kv_version = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.930337] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.namespace = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.930524] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.root_token_id = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.930690] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.ssl_ca_crt_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.930860] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.timeout = 60.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931037] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.use_ssl = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931216] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931386] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931574] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931738] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.931899] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.connect_retries = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932069] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932232] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932395] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932552] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932743] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.932915] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933091] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933259] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933419] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933590] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.service_type = identity {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933754] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.933910] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.934079] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.934239] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.934418] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.934615] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] keystone.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.934764] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.ceph_mount_options = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.935185] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.935374] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.connection_uri = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.935542] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_mode = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.935747] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.935960] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_models = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936155] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_power_governor_high = performance {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936329] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936492] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_power_management = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936660] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936833] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.device_detach_attempts = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.936996] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.device_detach_timeout = 20 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937179] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.disk_cachemodes = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937342] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.disk_prefix = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937505] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.enabled_perf_events = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937668] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.file_backed_memory = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937836] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.gid_maps = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.937994] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.hw_disk_discard = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.938165] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.hw_machine_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.938334] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_rbd_ceph_conf = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.938494] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.938654] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.938868] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_rbd_glance_store_name = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939112] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939300] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_type = default {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939465] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.images_volume_group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939630] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.inject_key = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939795] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.inject_partition = -2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.939956] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.inject_password = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940131] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.iscsi_iface = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940292] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.iser_use_multipath = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940455] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940617] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940781] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_downtime = 500 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.940942] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941115] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941277] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_inbound_addr = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941438] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941630] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941801] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_scheme = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.941974] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_timeout_action = abort {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.942153] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_tunnelled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.942315] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_uri = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.942480] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.live_migration_with_native_tls = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.942647] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.max_queues = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.942814] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.943049] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.943218] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.nfs_mount_options = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.943501] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.943677] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68437) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.943855] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944045] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944221] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944385] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_pcie_ports = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944554] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944724] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.pmem_namespaces = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.944885] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.quobyte_client_cfg = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.945184] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.945363] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.945529] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.945697] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.945857] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rbd_secret_uuid = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946024] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rbd_user = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946193] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946364] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946525] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rescue_image_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946685] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rescue_kernel_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.946842] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rescue_ramdisk_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947014] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947187] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.rx_queue_size = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947357] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.smbfs_mount_options = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947657] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947834] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.snapshot_compression = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.947998] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.snapshot_image_format = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.948246] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.948416] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.sparse_logical_volumes = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.948580] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.swtpm_enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.948755] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.swtpm_group = tss {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.948932] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.swtpm_user = tss {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949123] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.sysinfo_serial = unique {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949289] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.tb_cache_size = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949449] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.tx_queue_size = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.uid_maps = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949781] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.use_virtio_for_bridges = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.949956] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.virt_type = kvm {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.950144] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.volume_clear = zero {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.950309] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.volume_clear_size = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.950543] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.volume_enforce_multipath = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.950747] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.volume_use_multipath = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.950913] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_cache_path = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.951099] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.951278] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.951446] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.951647] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.951959] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.952159] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.vzstorage_mount_user = stack {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.952335] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.952535] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.952705] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.auth_type = password {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.952874] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953048] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953218] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953378] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953538] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953711] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.default_floating_pool = public {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.953893] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954090] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.extension_sync_interval = 600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954264] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.http_retries = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954432] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954595] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954762] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.954930] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955103] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955276] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.ovs_bridge = br-int {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955445] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.physnets = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955616] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.region_name = RegionOne 
{{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955777] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.955948] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.service_metadata_proxy = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956123] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956296] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.service_type = network {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956461] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956622] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956785] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.956946] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.957142] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.957309] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] neutron.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.957482] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.bdms_in_notifications = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.957661] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.default_level = INFO {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.957831] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.include_share_mapping = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958012] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.notification_format = unversioned {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958186] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.notify_on_state_change = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958364] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958543] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] pci.alias = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958718] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] pci.device_spec = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.958884] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] pci.report_in_placement = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959069] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959249] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.auth_type = password {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959423] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959584] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959745] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.959908] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960078] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.connect_retries = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960239] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960400] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.default_domain_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960556] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.default_domain_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960763] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.domain_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.960934] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.domain_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961109] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.endpoint_override = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961276] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961437] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961630] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961802] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.961974] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.password = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.962151] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.project_domain_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.962319] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.project_domain_name = Default {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.962490] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.project_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.962666] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.project_name = service {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.962839] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.region_name = RegionOne {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963008] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963179] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963350] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.service_type = placement {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963516] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963674] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.status_code_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.963845] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964034] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.system_scope = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964200] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964360] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.trust_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964518] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.user_domain_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964686] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] 
placement.user_domain_name = Default {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.964843] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.user_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965028] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.username = nova {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965215] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965378] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] placement.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965563] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.cores = 20 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965730] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.count_usage_from_placement = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.965905] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.966145] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.injected_file_content_bytes = 10240 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.966330] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.injected_file_path_length = 255 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.966498] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.injected_files = 5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.966666] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.instances = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.966901] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.key_pairs = 100 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967105] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.metadata_items = 128 {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967280] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.ram = 51200 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967447] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.recheck_quota = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967617] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.server_group_members = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967783] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.server_groups = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.967997] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.968190] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] quota.unified_limits_resource_strategy = require {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.968366] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.968531] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.968695] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.image_metadata_prefilter = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.968858] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969031] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.max_attempts = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969203] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.max_placement_results = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969376] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969540] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969700] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.969873] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] scheduler.workers = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970065] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970242] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970421] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970589] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970755] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.970919] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971095] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971283] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971450] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971648] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971822] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.971986] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.972164] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.972341] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.972508] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.isolated_hosts = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.972729] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.isolated_images = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.972860] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973033] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973200] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973363] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.pci_in_placement = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973524] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973687] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.973853] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974051] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974223] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974384] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974543] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.track_instance_changes = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974719] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.974889] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metrics.required = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.975068] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metrics.weight_multiplier = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.975235] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.975401] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] metrics.weight_setting = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.975731] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.975910] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.enabled = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976100] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.port_range = 10000:20000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976275] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976453] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976623] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] serial_console.serialproxy_port = 6083 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976793] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.976966] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.auth_type = password {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977299] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977460] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977618] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977777] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.977948] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.send_service_user_token = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.978125] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.split_loggers = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.978284] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] service_user.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.978453] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.agent_enabled = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.978614] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.978917] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979141] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979317] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.html5proxy_port = 6082 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979479] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.image_compression = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979638] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.jpeg_compression = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979799] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.playback_compression = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.979962] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.require_secure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.980151] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.server_listen = 127.0.0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.980323] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.980603] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.980778] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.streaming_mode = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.980942] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] spice.zlib_compression = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981124] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] upgrade_levels.baseapi = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981300] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] upgrade_levels.compute = auto {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981461] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] upgrade_levels.conductor = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981655] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] upgrade_levels.scheduler = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981830] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.981995] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.982171] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.982331] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.982494] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.982675] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.982839] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983010] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983175] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vendordata_dynamic_auth.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983349] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.api_retry_count = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983511] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.ca_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983684] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.983865] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.cluster_name = testcl1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984060] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.connection_pool_size = 10 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984228] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.console_delay_seconds = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984399] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.datastore_regex = ^datastore.* {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984607] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984784] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.host_password = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.984949] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.host_port = 443 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985141] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.host_username = administrator@vsphere.local {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985298] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.insecure = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985460] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.integration_bridge = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985624] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.maximum_objects = 100 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985786] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.pbm_default_policy = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.985945] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.pbm_enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986117] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.pbm_wsdl_location = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986288] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986445] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.serial_port_proxy_uri = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986603] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.serial_port_service_uri = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986769] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.task_poll_interval = 0.5 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.986937] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.use_linked_clone = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.987119] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.vnc_keymap = en-us {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.987288] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.vnc_port = 5900 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.987451] env[68437]: DEBUG 
oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vmware.vnc_port_total = 10000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.987636] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.auth_schemes = ['none'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.987815] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.988114] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.988303] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.988476] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.novncproxy_port = 6080 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.988660] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.server_listen = 127.0.0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.988844] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989021] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.vencrypt_ca_certs = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989181] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.vencrypt_client_cert = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989340] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vnc.vencrypt_client_key = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989522] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989687] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_deep_image_inspection = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.989846] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990013] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990180] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990341] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.disable_rootwrap = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990502] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.enable_numa_live_migration = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990667] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990826] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.990986] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.991161] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.libvirt_disable_apic = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.991322] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.991486] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.991828] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992015] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992189] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992355] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992525] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992728] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.992923] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993073] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993264] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993437] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.client_socket_timeout = 900 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993607] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.default_pool_size = 1000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993777] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.keep_alive = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.993971] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.max_header_line = 16384 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.994164] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.994332] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.ssl_ca_file = None 
{{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.994496] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.ssl_cert_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.994657] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.ssl_key_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.994825] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.tcp_keepidle = 600 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.995008] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.995234] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] zvm.ca_file = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.995353] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] zvm.cloud_connector_url = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.995649] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.995898] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] zvm.reachable_timeout = 300 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.996127] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.996317] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.996498] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.connection_string = messaging:// {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.996668] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.enabled = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.996843] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] 
profiler.es_doc_type = notification {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997015] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.es_scroll_size = 10000 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997193] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.es_scroll_time = 2m {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997356] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.filter_error_trace = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997523] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.hmac_keys = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997690] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.sentinel_service_name = mymaster {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.997855] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.socket_timeout = 0.1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998023] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.trace_requests = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998187] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler.trace_sqlalchemy = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998370] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler_jaeger.process_tags = {} {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998534] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler_jaeger.service_name_prefix = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998700] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] profiler_otlp.service_name_prefix = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.998864] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] remote_debug.host = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999032] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] remote_debug.port = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999214] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999376] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999539] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999703] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 606.999863] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000031] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000195] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000354] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000513] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000682] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.000844] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001017] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001188] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001351] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001513] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001717] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.001887] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002060] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002232] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002393] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002586] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002786] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.002958] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003139] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003303] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68437) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003463] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003622] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003783] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.003941] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004112] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004275] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004443] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004607] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004766] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.004931] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005109] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005272] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005453] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005618] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_notifications.retry = -1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005793] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.005961] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006154] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.auth_section = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006319] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.auth_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006479] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.cafile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006635] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.certfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006798] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.collect_timing = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.006953] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.connect_retries = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007124] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.connect_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007282] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_id = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007457] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007619] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_override = 
None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007787] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.007950] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008121] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.endpoint_service_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008284] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.insecure = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008442] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.keyfile = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008600] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.max_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008757] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.min_version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.008914] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.region_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009084] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.retriable_status_codes = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009246] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.service_name = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009402] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.service_type = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009562] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.split_loggers = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009718] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.status_code_retries = None {{(pid=68437) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.009875] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.status_code_retry_delay = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010042] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.timeout = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010203] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.valid_interfaces = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010358] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_limit.version = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010523] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_reports.file_event_handler = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010688] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.010846] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] oslo_reports.log_dir = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011026] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011192] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011352] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011524] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011709] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.011871] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012051] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012216] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012377] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012561] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012738] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.012899] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] vif_plug_ovs_privileged.user = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013082] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013268] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013443] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013617] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013794] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.013969] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.014153] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.014317] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.014496] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.014693] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.isolate_vif = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.014887] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015069] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015246] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015422] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015584] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] os_vif_ovs.per_port_bridge = False {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015754] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.capabilities = [21] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.015915] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016084] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.helper_command = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016251] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016417] env[68437]: DEBUG oslo_service.backend.eventlet.service [None 
req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016574] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] privsep_osbrick.user = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016747] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.016906] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.group = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.017075] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.helper_command = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.017244] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.017406] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.017562] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] nova_sys_admin.user = None {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 607.017696] env[68437]: DEBUG oslo_service.backend.eventlet.service [None req-2265a432-560f-4af5-9adb-d5b8154641d2 None None] ******************************************************************************** {{(pid=68437) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 607.018126] env[68437]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 607.521937] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Getting list of instances from cluster (obj){ [ 607.521937] env[68437]: value = "domain-c8" [ 607.521937] env[68437]: _type = "ClusterComputeResource" [ 607.521937] env[68437]: } {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 607.523119] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d020466-0c64-4bd9-8fa8-a7fa9c859871 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.532676] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Got total of 0 instances {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 607.533141] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 607.533609] env[68437]: INFO nova.virt.node [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Generated node identity 422e986f-b38b-46ad-94b3-91f3ccd10a05 [ 607.533847] env[68437]: INFO nova.virt.node [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Wrote node identity 422e986f-b38b-46ad-94b3-91f3ccd10a05 to /opt/stack/data/n-cpu-1/compute_id [ 608.036445] env[68437]: WARNING nova.compute.manager [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Compute nodes ['422e986f-b38b-46ad-94b3-91f3ccd10a05'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 609.042266] env[68437]: INFO nova.compute.manager [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 610.049106] env[68437]: WARNING nova.compute.manager [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 610.049106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.049106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.049106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.049579] env[68437]: DEBUG nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 610.050189] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f46899-3483-4a6d-8dd8-c81ef54078b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.058582] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e32c14e-5255-414d-80e1-9407a3bc949c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.072661] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6bf277-9c6a-4c1f-9968-013b54843dc2 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.078843] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b24a0b-b73a-4891-897f-0eadd63e005d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.106559] env[68437]: DEBUG nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181107MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 610.106700] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.106982] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.609937] env[68437]: WARNING nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] No compute node record for cpu-1:422e986f-b38b-46ad-94b3-91f3ccd10a05: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 422e986f-b38b-46ad-94b3-91f3ccd10a05 could not be found. [ 611.113459] env[68437]: INFO nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 422e986f-b38b-46ad-94b3-91f3ccd10a05 [ 612.621829] env[68437]: DEBUG nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 612.622216] env[68437]: DEBUG nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 612.894264] env[68437]: INFO nova.scheduler.client.report [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] [req-65971bcf-5ce4-4744-afcf-fc6444a2002e] Created resource provider record via placement API for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 612.914122] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489bb20d-4542-4454-b38a-779a0935c119 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.920705] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714f4ec3-9fb4-46fc-bd16-1d7c889148d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.950986] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961df8a4-2987-4bde-aba2-6d72fcb37710 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.958723] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31154df2-60c1-4606-92d7-b1135dfa4e8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.971936] env[68437]: DEBUG nova.compute.provider_tree [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.509941] env[68437]: DEBUG nova.scheduler.client.report [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 613.509941] env[68437]: DEBUG nova.compute.provider_tree [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 0 to 1 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 613.509941] env[68437]: DEBUG nova.compute.provider_tree [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.556115] env[68437]: DEBUG nova.compute.provider_tree [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Updating 
resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 1 to 2 during operation: update_traits {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 614.061565] env[68437]: DEBUG nova.compute.resource_tracker [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 614.061565] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.954s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.061565] env[68437]: DEBUG nova.service [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Creating RPC server for service compute {{(pid=68437) start /opt/stack/nova/nova/service.py:186}} [ 614.074991] env[68437]: DEBUG nova.service [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] Join ServiceGroup membership for this service compute {{(pid=68437) start /opt/stack/nova/nova/service.py:203}} [ 614.074991] env[68437]: DEBUG nova.servicegroup.drivers.db [None req-3113a2c2-363d-42ce-bfd5-3aa3e754873e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68437) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 635.076576] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 635.580132] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Getting list of instances from cluster (obj){ [ 635.580132] env[68437]: value = "domain-c8" [ 635.580132] env[68437]: _type = "ClusterComputeResource" [ 635.580132] env[68437]: } {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 635.581351] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d081f6d-cdb6-4798-a098-a1c60727a00f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.589598] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Got total of 0 instances {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 635.589797] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 635.590100] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Getting list of instances from cluster (obj){ [ 635.590100] env[68437]: value = "domain-c8" [ 635.590100] env[68437]: _type = "ClusterComputeResource" [ 635.590100] env[68437]: } {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 635.590897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16386c21-831d-4fe3-ad97-b13b609b5641 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.597801] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Got total of 0 instances {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 653.443905] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.444909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.948286] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 654.241777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "0484ccee-f003-4101-87c5-fed92f095d2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.242582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.512178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.512178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.512178] env[68437]: INFO nova.compute.claims [None 
req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.748077] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 655.126300] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "180f77ab-e468-410d-8e41-20291487ef5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.128763] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.284334] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.619043] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ac2aea-4f61-4f17-935a-6638248b3363 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.630169] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df65aa9f-ed8b-4272-998c-079b3860203a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.631655] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 655.664511] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce8e0d5-ba33-4a24-a746-4fbde5e2ace9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.672947] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f325155-e446-4a49-a3a5-efaa0ec18cca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.686790] env[68437]: DEBUG nova.compute.provider_tree [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.005388] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.005918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.192875] env[68437]: DEBUG nova.scheduler.client.report [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.201323] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.509045] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.704310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.194s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.704600] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 656.709759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.424s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.712301] env[68437]: INFO nova.compute.claims [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.039787] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.071874] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "45595615-59c0-4c59-b18c-b49a3126dbb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.072152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.218477] env[68437]: DEBUG nova.compute.utils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 657.219938] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 
tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 657.222103] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 657.575645] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 657.604734] env[68437]: DEBUG nova.policy [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7e2c2d7b1d748e5b96fd49fd1285112', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5035a40e44f549e7add64045ef3ab722', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 657.731356] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 657.871062] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567537da-7724-4cbf-911a-c580f7960bce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.881998] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c31ec41-481d-4d6f-8971-5e32bd5411cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.922776] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12545d5-d91d-49d1-b29e-bbae7227e778 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.933312] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1c51ab-9d14-42a0-92fd-ef0d985f66ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.954329] env[68437]: DEBUG nova.compute.provider_tree [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.070056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.070315] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.110116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.459959] env[68437]: DEBUG nova.scheduler.client.report [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 658.575767] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.743508] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 658.786392] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 658.787119] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.787119] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 658.787119] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.788205] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 658.788460] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 658.788690] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 658.788902] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 658.789349] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 658.789525] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 658.789696] env[68437]: DEBUG nova.virt.hardware [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 658.790629] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43afcae7-1bf1-43a4-a876-0932e37d35f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.802766] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae1c37a-aa3d-45fb-94b4-dc08447f6155 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.824557] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef70aa3d-4736-44ff-b260-58b18df92490 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.966206] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.966846] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.973635] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.772s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 658.975157] env[68437]: INFO nova.compute.claims [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.104717] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.320720] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Successfully created port: 9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.475311] env[68437]: DEBUG nova.compute.utils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 659.479900] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 659.480211] env[68437]: DEBUG nova.network.neutron [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 659.649854] env[68437]: DEBUG nova.policy [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7536b751d42c4b5889c055d32268f93c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06bbda421d194770bfc1b9624522a665', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.985546] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 660.139500] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c0ff62-598f-4f8b-a66c-3c8c5da0041b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.148605] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337c146d-59d4-4037-8f71-5949b83028d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.191514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39abba67-e215-45dc-b602-5b87178d2042 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.199788] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1d3c10-fd32-4206-84bb-06952615d3b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.218969] env[68437]: DEBUG nova.compute.provider_tree [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.300742] env[68437]: DEBUG nova.network.neutron [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Successfully created port: 037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.723942] env[68437]: DEBUG nova.scheduler.client.report [None 
req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.001474] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 661.042371] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 661.044928] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 661.044928] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 661.047575] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 661.047695] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 661.047853] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 
tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 661.048088] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 661.048419] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 661.048419] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 661.048887] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 661.048887] env[68437]: DEBUG nova.virt.hardware [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 661.049679] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cf876b-9404-44f0-9e84-074d00dc5d11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.063992] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c009e5-a4b4-4444-b78d-9d47f021f304 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.232816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.232816] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 661.235795] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.196s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.238134] env[68437]: INFO nova.compute.claims [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.744853] env[68437]: DEBUG nova.compute.utils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 661.753087] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 661.753290] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 661.792469] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "ce8fd88b-249b-4fee-80fc-35b795d24658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.795318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.847168] env[68437]: DEBUG nova.policy [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2f590291b044cc3b5b66e7474c9a755', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78fa0b9c26b04821bf43772290f609df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} 
{{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 662.239313] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.239724] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.239909] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.240193] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.240415] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.241390] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.242672] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.242835] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 662.243032] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 662.255177] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 662.299362] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 662.424091] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fc55dd-cb5c-451c-99c6-aef1228849cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.432341] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b0ebd5-c104-4c7c-a05b-1fd0c6227f6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.464011] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ebeced-885a-46cb-b3b7-b5318ccb25c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.476979] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f24df2-b837-40ac-9ace-f352b9da23cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.491702] env[68437]: DEBUG nova.compute.provider_tree [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.609759] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Successfully created port: 803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.746679] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.821310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.885123] env[68437]: DEBUG nova.network.neutron [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Successfully updated port: 037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.997413] env[68437]: DEBUG nova.scheduler.client.report [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.275227] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 663.321459] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 663.321459] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.321459] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 663.321599] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.321599] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 663.321599] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 663.321599] env[68437]: DEBUG nova.virt.hardware [None 
req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 663.322574] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 663.322943] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 663.323234] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 663.323615] env[68437]: DEBUG nova.virt.hardware [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 663.326032] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fab0ded-819f-4457-a016-818cb87dbe15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.333327] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Successfully updated port: 9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.342806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139f9263-3f90-4093-8437-7766d1edef2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.387573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.387704] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.387853] env[68437]: DEBUG nova.network.neutron [None 
req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 663.507714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.508382] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.511596] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.401s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.516325] env[68437]: INFO nova.compute.claims [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.840814] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.840865] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.841154] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 663.975988] env[68437]: DEBUG nova.network.neutron [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 664.022468] env[68437]: DEBUG nova.compute.utils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 664.029974] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 664.029974] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 664.116026] env[68437]: DEBUG nova.policy [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4f4e8815538e4c10bf88b84e53353ac3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c89cb01775f48a0ad00891a9ab90909', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 664.337096] env[68437]: DEBUG nova.compute.manager [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Received event network-vif-plugged-037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 664.337096] env[68437]: DEBUG oslo_concurrency.lockutils [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] Acquiring lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.337096] env[68437]: DEBUG oslo_concurrency.lockutils [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] Lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.337096] env[68437]: DEBUG oslo_concurrency.lockutils [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] Lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.337096] env[68437]: DEBUG nova.compute.manager [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 
req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] No waiting events found dispatching network-vif-plugged-037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.337464] env[68437]: WARNING nova.compute.manager [req-6a4ce1ad-4738-4a12-8d08-0e862de8ade4 req-fcf24734-0c25-4487-95c2-da009e81c8ff service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Received unexpected event network-vif-plugged-037e71f7-843b-4af0-ad1e-1289fc16a69b for instance with vm_state building and task_state spawning. [ 664.342613] env[68437]: DEBUG nova.network.neutron [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Updating instance_info_cache with network_info: [{"id": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "address": "fa:16:3e:32:a2:64", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e71f7-84", "ovs_interfaceid": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.357231] env[68437]: DEBUG nova.compute.manager [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Received event network-vif-plugged-9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 664.357231] env[68437]: DEBUG oslo_concurrency.lockutils [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] Acquiring lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.357231] env[68437]: DEBUG oslo_concurrency.lockutils [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.357231] env[68437]: DEBUG oslo_concurrency.lockutils [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.357231] env[68437]: DEBUG nova.compute.manager [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] No waiting events found dispatching network-vif-plugged-9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.357496] env[68437]: WARNING nova.compute.manager [req-79c3c121-a4ac-4ef6-8dbf-938922ea9f2c req-fdc516da-becb-4ae8-a4c3-7fdf17fbf668 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Received unexpected event network-vif-plugged-9e8115c0-b1ad-464a-9628-a7845a89de10 for instance with vm_state building and task_state spawning. [ 664.420010] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 664.528866] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.680272] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650d771c-1248-4fec-9399-e03d543cadc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.694409] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3445187c-0cde-4eff-a50b-df074947aac3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.733594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306035e-71d6-4134-85bd-ba451cf52669 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.741734] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec5c6b5-1abd-4bc1-bc74-65e57f80504e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.755525] env[68437]: DEBUG nova.compute.provider_tree [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.792957] env[68437]: DEBUG nova.network.neutron [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Updating instance_info_cache with network_info: [{"id": "9e8115c0-b1ad-464a-9628-a7845a89de10", "address": "fa:16:3e:09:e8:c9", "network": {"id": 
"62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8115c0-b1", "ovs_interfaceid": "9e8115c0-b1ad-464a-9628-a7845a89de10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.845562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.845562] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Instance network_info: |[{"id": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "address": "fa:16:3e:32:a2:64", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e71f7-84", "ovs_interfaceid": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 664.845827] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:a2:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'037e71f7-843b-4af0-ad1e-1289fc16a69b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.860735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.861049] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41484089-271b-496f-8257-9e43d1c643c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.873870] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created folder: OpenStack in parent group-v4. [ 664.874187] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating folder: Project (06bbda421d194770bfc1b9624522a665). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.874323] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b40218d-dc81-4946-ba25-c165e0583e18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.884498] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created folder: Project (06bbda421d194770bfc1b9624522a665) in parent group-v590848. [ 664.884725] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating folder: Instances. Parent ref: group-v590849. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 664.885011] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2026948-c3f6-4105-ba08-d56ac50d5680 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.894183] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created folder: Instances in parent group-v590849. [ 664.894432] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.894627] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.894817] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae93a2ba-8ee8-40f2-91c3-c798bc8cb777 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.918211] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.918211] env[68437]: value = "task-2943580" [ 664.918211] env[68437]: _type = "Task" [ 664.918211] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.928768] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943580, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.259561] env[68437]: DEBUG nova.scheduler.client.report [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 665.295963] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.296241] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Instance network_info: |[{"id": "9e8115c0-b1ad-464a-9628-a7845a89de10", "address": "fa:16:3e:09:e8:c9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8115c0-b1", "ovs_interfaceid": "9e8115c0-b1ad-464a-9628-a7845a89de10", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 665.296658] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:e8:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e8115c0-b1ad-464a-9628-a7845a89de10', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 665.307275] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Creating folder: Project (5035a40e44f549e7add64045ef3ab722). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.308485] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4290662f-52f7-4844-a4b9-a7adfedbbf80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.320135] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Created folder: Project (5035a40e44f549e7add64045ef3ab722) in parent group-v590848. [ 665.320507] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Creating folder: Instances. Parent ref: group-v590852. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 665.320899] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a567a8e7-a66d-48d0-ab21-4503457c7fc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.330938] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Created folder: Instances in parent group-v590852. [ 665.331254] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 665.331508] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 665.331766] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b37c7c8e-5a94-4b1e-87bb-0de950f3233c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.352522] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 665.352522] env[68437]: value = "task-2943583" [ 665.352522] env[68437]: _type = "Task" [ 665.352522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.361614] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943583, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.430377] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943580, 'name': CreateVM_Task, 'duration_secs': 0.352502} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.430912] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.458286] env[68437]: DEBUG oslo_vmware.service [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0481c0-f4c5-4ef0-84b0-263e0eeab856 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.466147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.466316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.467051] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.467599] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-062e0dd3-5afb-4916-a1d3-4675c9f9c1dd {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.473951] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 665.473951] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ca7c84-6c7e-ede8-3886-8c100932670f" [ 665.473951] env[68437]: _type = "Task" [ 665.473951] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.487507] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ca7c84-6c7e-ede8-3886-8c100932670f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.488936] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Successfully updated port: 803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.542748] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.576783] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.578674] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.579021] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.579341] 
env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.579540] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.579705] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.579979] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.580198] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.580400] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.580628] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.580820] env[68437]: DEBUG nova.virt.hardware [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.581860] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7578bde5-42aa-4fdd-8b9a-e3867f7021bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.597106] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2021b07b-0fba-42c2-bbc3-9c38f3d026c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.674286] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Successfully created port: 
1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.770149] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.770472] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.775807] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.669s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.777305] env[68437]: INFO nova.compute.claims [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.869502] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943583, 'name': CreateVM_Task, 'duration_secs': 0.352213} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.869502] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.869502] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.996249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.996329] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquired lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.996572] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 666.000255] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.000255] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.000255] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.002570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.002570] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.002570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.002570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 666.002885] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-555e6895-305d-42d2-afea-fcf39fac5d4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.005956] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ef48ff9-2174-4d23-a4b5-4841ef7a784a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.018991] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 666.018991] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52927ee5-5edd-fa19-a80a-c6f36a1df439" [ 666.018991] env[68437]: _type = "Task" [ 666.018991] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.033448] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52927ee5-5edd-fa19-a80a-c6f36a1df439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.033705] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.033905] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 666.034668] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e578e7e2-9f41-4e9c-b05b-c7295fd4e2cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.046658] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b9ef980-733a-443d-a407-25e756674784 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.052639] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 666.052639] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52132480-9439-8fbe-e005-80e5f11c09ad" [ 666.052639] env[68437]: _type = "Task" [ 666.052639] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.060671] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52132480-9439-8fbe-e005-80e5f11c09ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.285646] env[68437]: DEBUG nova.compute.utils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 666.287419] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 666.287575] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 666.517107] env[68437]: DEBUG nova.policy [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '227473a85c3242229bd559b521dd0023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d7b3b9e525e494d896b8d6e874c3e8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.531559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.531559] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 666.531559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.565618] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 666.565618] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating directory with path [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 666.565773] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b494fd5-79e7-4d5f-a2a7-8c869484329c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.607019] env[68437]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created directory with path [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 666.607019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Fetch image to [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 666.607019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Downloading image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk on the data store datastore1 {{(pid=68437) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 666.607019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ce83f6-b305-44c0-989c-f7afe851009a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.622476] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3694131-71fc-4a6d-91eb-04f127f3a104 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.632153] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 666.635160] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da015bf5-c67c-4e6e-87a5-f8c8336bd11c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.667737] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9b4dad-672c-4890-9feb-885280354041 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.675131] env[68437]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c307c5e0-646f-419d-b548-44880a7d5740 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.777675] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Downloading image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to the data store datastore1 {{(pid=68437) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 666.796234] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.870024] env[68437]: DEBUG oslo_vmware.rw_handles [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 667.018344] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd6bdd0-4445-4de3-a710-f626443f5d89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.030667] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0828dd1f-ef03-495d-8e17-0c9721bfcc71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.072218] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b26f7d-53cb-4e91-99c7-2dd80cf2b905 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.082020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc8049d-2274-400e-8fdc-32bfeceed390 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.102291] env[68437]: DEBUG nova.compute.provider_tree [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.543163] env[68437]: DEBUG oslo_vmware.rw_handles [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 667.543498] env[68437]: DEBUG oslo_vmware.rw_handles [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 667.565393] env[68437]: DEBUG nova.network.neutron [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Updating instance_info_cache with network_info: [{"id": "803f314e-2d1c-448a-ae52-ae285d8689cb", "address": "fa:16:3e:0a:02:00", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap803f314e-2d", "ovs_interfaceid": "803f314e-2d1c-448a-ae52-ae285d8689cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.609053] env[68437]: DEBUG nova.scheduler.client.report [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.648229] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Successfully created port: 095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.688624] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Downloaded image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk on the data store datastore1 {{(pid=68437) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 667.690336] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] 
[instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 667.690593] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copying Virtual Disk [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk to [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.691371] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fb171ef-5f99-4201-8ac6-114ccda7661f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.700620] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 667.700620] env[68437]: value = "task-2943584" [ 667.700620] env[68437]: _type = "Task" [ 667.700620] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.709793] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943584, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.811639] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.852296] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.853126] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.853126] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.853126] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.853126] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.854249] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.854555] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.854731] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.854979] env[68437]: DEBUG nova.virt.hardware [None 
req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.855091] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.856669] env[68437]: DEBUG nova.virt.hardware [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.858866] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711a11de-d8b9-4b72-bac3-5d726765a59a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.869802] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcea875-3e87-41ea-b506-07fcec98638c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.070252] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Releasing lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.070605] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Instance network_info: |[{"id": "803f314e-2d1c-448a-ae52-ae285d8689cb", "address": "fa:16:3e:0a:02:00", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap803f314e-2d", "ovs_interfaceid": "803f314e-2d1c-448a-ae52-ae285d8689cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 668.071045] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 
tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:02:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '803f314e-2d1c-448a-ae52-ae285d8689cb', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.078588] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Creating folder: Project (78fa0b9c26b04821bf43772290f609df). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.081510] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c76dbc2a-54d3-4eff-bc97-2798355020e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.090526] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Created folder: Project (78fa0b9c26b04821bf43772290f609df) in parent group-v590848. [ 668.090736] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Creating folder: Instances. Parent ref: group-v590855. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.092065] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc6cfebf-b80a-48b3-9430-320bf1c9abf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.101521] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Created folder: Instances in parent group-v590855. [ 668.101521] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.102029] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.102029] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f266ca3-8ee8-4278-bfb9-9b7960578fcb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.124116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.124477] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 668.128085] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.382s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.128275] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.128670] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 668.129012] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.308s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.130776] env[68437]: INFO nova.compute.claims [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.134868] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ffcd75-76f3-4e4f-b49a-95970ae320e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.143024] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 
668.143024] env[68437]: value = "task-2943587" [ 668.143024] env[68437]: _type = "Task" [ 668.143024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.148791] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ada3a98-223c-4252-b259-b8778f39aefb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.162875] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943587, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.175707] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a5e27b-59f4-4ef7-b30e-4c94f90d421a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.185751] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a6382b-5f4a-48fc-91b6-45e0e4f8f566 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.218627] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181102MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 668.218822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.228637] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943584, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.400927] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Successfully updated port: 1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 668.638301] env[68437]: DEBUG nova.compute.utils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.645810] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.645810] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 668.657746] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943587, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.728677] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943584, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667013} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.728978] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copied Virtual Disk [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk to [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.729763] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleting the datastore file [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.729763] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-931bfc42-4c19-4d2a-a029-e3687e4bd150 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.735176] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 668.735176] env[68437]: value = "task-2943588" [ 668.735176] env[68437]: _type = "Task" [ 668.735176] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.740770] env[68437]: DEBUG nova.policy [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.747854] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.903608] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.903760] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquired lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.903955] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 669.143549] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 669.163342] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943587, 'name': CreateVM_Task, 'duration_secs': 0.588876} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.163342] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.164732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.164940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.165240] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.166176] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84b284d3-d6c7-4b32-b9ac-3d5b2fdc6f5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.175014] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 669.175014] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525d03a7-fab3-7906-238b-122db7f1a106" [ 669.175014] env[68437]: _type = "Task" [ 669.175014] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.183883] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525d03a7-fab3-7906-238b-122db7f1a106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.249846] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029825} completed successfully. 
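
The DeleteDatastoreFile_Task records above follow oslo.vmware's usual shape: invoke_api() issues a vSphere call that returns a task reference, and wait_for_task() polls it, which is what produces the "progress is N%" and "completed successfully" lines. A rough sketch of the delete step, assuming an already-created oslo_vmware.api.VMwareAPISession named session and placeholder datastore path / datacenter values:

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, ds_path, dc_ref):
        # `session` is an existing VMwareAPISession; ds_path / dc_ref are placeholders
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        try:
            # wait_for_task() polls the task and logs progress until it finishes
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # a file that is already gone is treated as successfully deleted here
            pass
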
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.250470] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.250470] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Moving file from [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719/a272f526-6b8d-4a29-bd06-cd29ab5fabbe to [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe. {{(pid=68437) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 669.251492] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-17477f05-4a96-458b-bae9-390cc6dd381d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.262724] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 669.262724] env[68437]: value = "task-2943589" [ 669.262724] env[68437]: _type = "Task" [ 669.262724] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.271560] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943589, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.342337] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36746f8f-64e3-4391-831a-ea5784f23e5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.351372] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3436033e-cb13-4b35-8a25-19193b78ea16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.391452] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7292b02e-c285-4307-9906-d111b7d68b42 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.406021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee18cd18-4c2c-4492-8256-b7b0c62ba783 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.421024] env[68437]: DEBUG nova.compute.provider_tree [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.532942] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 669.600153] env[68437]: DEBUG nova.compute.manager [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Received event network-changed-9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 669.600357] env[68437]: DEBUG nova.compute.manager [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Refreshing instance network info cache due to event network-changed-9e8115c0-b1ad-464a-9628-a7845a89de10. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 669.600565] env[68437]: DEBUG oslo_concurrency.lockutils [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] Acquiring lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.600713] env[68437]: DEBUG oslo_concurrency.lockutils [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] Acquired lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.600869] env[68437]: DEBUG nova.network.neutron [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Refreshing network info cache for port 9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 669.628523] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Received event network-changed-037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 669.628583] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Refreshing instance network info cache due to event network-changed-037e71f7-843b-4af0-ad1e-1289fc16a69b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 669.628790] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Acquiring lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.628936] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Acquired lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.629114] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Refreshing network info cache for port 037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 669.688795] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.689184] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] 
Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.691894] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.740122] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Successfully created port: 755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.773830] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943589, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.029116} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.774155] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] File moved {{(pid=68437) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 669.774708] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Cleaning up location [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 669.774903] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleting the datastore file [datastore1] vmware_temp/24e6a9d6-5d88-4599-945b-35e97cf88719 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.775531] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-248a4978-0775-4fc6-95c5-0e1a37901a31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.783376] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 669.783376] env[68437]: value = "task-2943590" [ 669.783376] env[68437]: _type = "Task" [ 669.783376] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.791755] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.925273] env[68437]: DEBUG nova.scheduler.client.report [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.159285] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 670.190939] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 670.191174] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.191335] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 670.191525] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 
{{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.191674] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 670.191820] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 670.192711] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 670.192992] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 670.193318] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 670.193832] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 670.193832] env[68437]: DEBUG nova.virt.hardware [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 670.194867] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af265a4d-f80f-49e7-b7bc-6847d63bc2e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.203947] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64683dc-6c73-4cdf-923d-78e3631ab78f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.301345] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024336} completed successfully. 
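
The nova.virt.hardware sequence above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amounts to enumerating the sockets x cores x threads factorizations of the vCPU count that fit within the flavor/image limits (65536 each here) and ranking them against the preferred topology. A simplified, self-contained illustration of the enumeration step, not nova's actual implementation:

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # every factorization of vcpus within the limits is a candidate topology
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topos.append(Topology(sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # -> [Topology(sockets=1, cores=1, threads=1)]
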
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.301345] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.301345] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d4d9c2-0548-4bd0-a1b9-7f169cf73390 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.307601] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 670.307601] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2adca-0dd8-d8e7-9d58-2ad97b7c7b2e" [ 670.307601] env[68437]: _type = "Task" [ 670.307601] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.315856] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2adca-0dd8-d8e7-9d58-2ad97b7c7b2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.403696] env[68437]: DEBUG nova.network.neutron [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Updating instance_info_cache with network_info: [{"id": "1856e5ed-7e1c-4837-a8bc-75086deed489", "address": "fa:16:3e:1f:f8:91", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1856e5ed-7e", "ovs_interfaceid": "1856e5ed-7e1c-4837-a8bc-75086deed489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.433208] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s 
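
The lockutils records here and throughout the log ('Lock "compute_resources" acquired by ... :: waited 5.308s', '... "released" ... :: held 2.301s') come from oslo.concurrency, which nova uses to serialize work such as resource-tracker claims and per-instance cache refreshes. A minimal sketch of the same primitive, with lock names borrowed from the log rather than real nova code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # runs with the process-local "compute_resources" lock held; entering and
        # leaving it is what emits the acquired/released (waited/held) debug lines
        pass

    claim_resources()

    # the same primitive is also available as a context manager
    with lockutils.lock('refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d'):
        pass  # critical section guarding that instance's network info cache
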
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.433208] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.433777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.215s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.459873] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Successfully updated port: 095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.821858] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2adca-0dd8-d8e7-9d58-2ad97b7c7b2e, 'name': SearchDatastore_Task, 'duration_secs': 0.011853} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.822250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.822523] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0484ccee-f003-4101-87c5-fed92f095d2d/0484ccee-f003-4101-87c5-fed92f095d2d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.822801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.822982] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.823252] env[68437]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e5c7101-5b6d-437f-8426-5deb5336b795 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.831717] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57966cbb-a41a-487d-927c-f7a669b0f3b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.839655] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 670.839655] env[68437]: value = "task-2943591" [ 670.839655] env[68437]: _type = "Task" [ 670.839655] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.846457] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.846457] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.846877] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86188cee-9b83-49f1-80ac-154e09504b2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.854355] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.857164] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 670.857164] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52cb33b3-258e-1460-85c7-dc443e472f0e" [ 670.857164] env[68437]: _type = "Task" [ 670.857164] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.867142] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cb33b3-258e-1460-85c7-dc443e472f0e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.907215] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Releasing lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.910493] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Instance network_info: |[{"id": "1856e5ed-7e1c-4837-a8bc-75086deed489", "address": "fa:16:3e:1f:f8:91", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1856e5ed-7e", "ovs_interfaceid": "1856e5ed-7e1c-4837-a8bc-75086deed489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 670.910661] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:f8:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1856e5ed-7e1c-4837-a8bc-75086deed489', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.918313] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Creating folder: Project (2c89cb01775f48a0ad00891a9ab90909). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.919112] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfa098bb-4aa5-4f8f-a788-32c2c901a64e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.931575] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Created folder: Project (2c89cb01775f48a0ad00891a9ab90909) in parent group-v590848. 
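
The vmops records above reduce the full Neutron network_info blob to the much smaller "Instance VIF info" structure the driver actually needs to build the VM: the bridge name, the MAC address, an OpaqueNetwork reference keyed by the NSX logical-switch id, the interface id, and the vmxnet3 model. A sketch of that mapping using only fields visible in these entries (simplified relative to the real nova.virt.vmwareapi vif handling):

    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        # `vif` is one entry of the network_info list shown in the log above
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],          # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }
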
[ 670.931844] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Creating folder: Instances. Parent ref: group-v590858. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.932279] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8cd7b19-e136-47f0-8b67-daad75ee0d3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.937479] env[68437]: DEBUG nova.compute.utils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.946474] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.948417] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 670.949099] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Created folder: Instances in parent group-v590858. [ 670.949977] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.949977] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.950404] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbc8cf59-a949-41e4-9ec5-1a7057d3cded {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.970407] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.970407] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.970407] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 670.977433] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.977433] env[68437]: value = "task-2943594" [ 670.977433] env[68437]: _type = "Task" [ 670.977433] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.992782] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943594, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.195522] env[68437]: DEBUG nova.policy [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b43de0a5d69a4f1b97ccb57168a64bbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73180707777547908c86bf1771a04d05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.220470] env[68437]: DEBUG nova.network.neutron [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Updated VIF entry in instance network info cache for port 9e8115c0-b1ad-464a-9628-a7845a89de10. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 671.221017] env[68437]: DEBUG nova.network.neutron [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Updating instance_info_cache with network_info: [{"id": "9e8115c0-b1ad-464a-9628-a7845a89de10", "address": "fa:16:3e:09:e8:c9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8115c0-b1", "ovs_interfaceid": "9e8115c0-b1ad-464a-9628-a7845a89de10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.275912] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Updated VIF entry in instance network info cache for port 037e71f7-843b-4af0-ad1e-1289fc16a69b. 
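
The two service-triggered sequences above ("Received event network-changed-<port>", "Refreshing network info cache for port ...", "Updated VIF entry in instance network info cache for port ...") re-fetch a port after Neutron reports a change and swap the matching entry in the per-instance cache. In outline, using plain dicts rather than nova's network model objects:

    def update_vif_entry(cached_network_info, port_id, fresh_vif):
        """Replace the cached VIF whose id matches port_id; append it if absent."""
        for index, vif in enumerate(cached_network_info):
            if vif.get('id') == port_id:
                cached_network_info[index] = fresh_vif
                return 'updated'
        cached_network_info.append(fresh_vif)
        return 'added'
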
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 671.276361] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Updating instance_info_cache with network_info: [{"id": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "address": "fa:16:3e:32:a2:64", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap037e71f7-84", "ovs_interfaceid": "037e71f7-843b-4af0-ad1e-1289fc16a69b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.352573] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508821} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.353159] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0484ccee-f003-4101-87c5-fed92f095d2d/0484ccee-f003-4101-87c5-fed92f095d2d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.354638] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.354638] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ce53402-f045-4380-8cf1-bd19b0a4bac9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.365546] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 671.365546] env[68437]: value = "task-2943595" [ 671.365546] env[68437]: _type = "Task" [ 671.365546] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.373075] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cb33b3-258e-1460-85c7-dc443e472f0e, 'name': SearchDatastore_Task, 'duration_secs': 0.00864} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.375712] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2c6b52-0275-4bb0-92f1-b5b19e2c1932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.381557] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.384724] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 671.384724] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529be929-c22f-836c-d0b8-ce44a572bdf9" [ 671.384724] env[68437]: _type = "Task" [ 671.384724] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.394560] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529be929-c22f-836c-d0b8-ce44a572bdf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.445228] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.480605] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance df3fbf16-d3d9-4138-b563-6ea09dd233b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.481096] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 0484ccee-f003-4101-87c5-fed92f095d2d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.481858] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 180f77ab-e468-410d-8e41-20291487ef5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.481858] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.481858] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 45595615-59c0-4c59-b18c-b49a3126dbb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.482027] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 27c18765-38cf-41d6-9139-9acffa94fbe6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.482060] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ce8fd88b-249b-4fee-80fc-35b795d24658 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 671.483058] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 671.483058] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 671.495284] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943594, 'name': CreateVM_Task, 'duration_secs': 0.388968} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.495463] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 671.496151] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.496310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.496865] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 671.496865] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12013e44-f414-4c52-b8ac-22f1022b0b5a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.502027] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 671.502027] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bdab7f-53ba-277c-88b9-fa8dd2862127" [ 671.502027] env[68437]: _type = "Task" [ 671.502027] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.513670] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bdab7f-53ba-277c-88b9-fa8dd2862127, 'name': SearchDatastore_Task, 'duration_secs': 0.008655} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.513965] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.514209] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.514411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.565331] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 671.600645] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca37405-0d5f-45aa-a19e-5d16b9c73c12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.611952] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd9f803-885d-4177-9048-f61ee22d5ea4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.647407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d683ea4d-3baf-4234-8088-38139576f285 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.656411] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea423af9-b723-4b55-b8bd-74e91f9738c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.672317] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.726440] env[68437]: DEBUG oslo_concurrency.lockutils [req-57bfac6b-774c-4c63-9621-81ef7fa6f528 req-cd168e65-4c0e-46bd-b882-c058f82b80d8 service nova] Releasing lock "refresh_cache-df3fbf16-d3d9-4138-b563-6ea09dd233b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.778884] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d 
req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Releasing lock "refresh_cache-0484ccee-f003-4101-87c5-fed92f095d2d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.779128] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Received event network-vif-plugged-803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 671.779625] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Acquiring lock "180f77ab-e468-410d-8e41-20291487ef5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.779914] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Lock "180f77ab-e468-410d-8e41-20291487ef5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.780201] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Lock "180f77ab-e468-410d-8e41-20291487ef5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.780619] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] No waiting events found dispatching network-vif-plugged-803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 671.781417] env[68437]: WARNING nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Received unexpected event network-vif-plugged-803f314e-2d1c-448a-ae52-ae285d8689cb for instance with vm_state building and task_state spawning. [ 671.781417] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Received event network-changed-803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 671.781417] env[68437]: DEBUG nova.compute.manager [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Refreshing instance network info cache due to event network-changed-803f314e-2d1c-448a-ae52-ae285d8689cb. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 671.781417] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Acquiring lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.781598] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Acquired lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.781775] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Refreshing network info cache for port 803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 671.875354] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068438} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.875354] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.876639] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57dfcd3-4163-484a-ab0e-c3a6c8d0d43e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.915226] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 0484ccee-f003-4101-87c5-fed92f095d2d/0484ccee-f003-4101-87c5-fed92f095d2d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.923464] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94c41066-07be-418b-a097-fda28d0106cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.964756] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529be929-c22f-836c-d0b8-ce44a572bdf9, 'name': SearchDatastore_Task, 'duration_secs': 0.008248} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.964756] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 671.964756] env[68437]: value = "task-2943596" [ 671.964756] env[68437]: _type = "Task" [ 671.964756] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.964756] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.964756] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] df3fbf16-d3d9-4138-b563-6ea09dd233b8/df3fbf16-d3d9-4138-b563-6ea09dd233b8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.965280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.965280] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.965280] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7c2a7ee-69c7-4c47-8c5f-7fb1d57395a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.980555] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0282264f-9932-4e7f-bf75-a65fed849302 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.995903] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943596, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.000929] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 672.000929] env[68437]: value = "task-2943597" [ 672.000929] env[68437]: _type = "Task" [ 672.000929] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.014233] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943597, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.015136] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 672.015320] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 672.016533] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23acde7a-35ce-43a3-99d2-3dab4a0ad2bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.024024] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 672.024024] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d8478d-9cd8-613e-0d37-ff69e0af45ae" [ 672.024024] env[68437]: _type = "Task" [ 672.024024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.032067] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d8478d-9cd8-613e-0d37-ff69e0af45ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.174651] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.310562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.310852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.464500] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.482096] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943596, 'name': ReconfigVM_Task, 'duration_secs': 0.325657} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.482096] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 0484ccee-f003-4101-87c5-fed92f095d2d/0484ccee-f003-4101-87c5-fed92f095d2d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.484512] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-faab8039-85ce-4e95-a9de-3738fb1b4c17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.489121] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 672.489121] env[68437]: value = "task-2943598" [ 672.489121] env[68437]: _type = "Task" [ 672.489121] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.498958] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943598, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.510922] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489925} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.513337] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.513608] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.513765] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 672.513946] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.514209] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.514389] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.514593] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.514739] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.514939] env[68437]: 
DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.515180] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.515558] env[68437]: DEBUG nova.virt.hardware [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.515883] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] df3fbf16-d3d9-4138-b563-6ea09dd233b8/df3fbf16-d3d9-4138-b563-6ea09dd233b8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.516220] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.517140] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fecd87-380a-4352-8136-8704662d77d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.521314] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f173484-390b-4a63-8bdd-ede9724f3ff4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.533208] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f3e0c3-f999-4757-8b05-813babf01b7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.538478] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 672.538478] env[68437]: value = "task-2943599" [ 672.538478] env[68437]: _type = "Task" [ 672.538478] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.560332] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d8478d-9cd8-613e-0d37-ff69e0af45ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011904} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.561363] env[68437]: DEBUG nova.network.neutron [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updating instance_info_cache with network_info: [{"id": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "address": "fa:16:3e:c4:8f:8d", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap095e5fc1-9f", "ovs_interfaceid": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.563754] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901523cf-e4cd-4e0d-9ce7-418083a0eca8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.571294] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943599, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.574738] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 672.574738] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5284caca-ff3d-33bc-67fa-f224f9873c77" [ 672.574738] env[68437]: _type = "Task" [ 672.574738] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.585453] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5284caca-ff3d-33bc-67fa-f224f9873c77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.679971] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Successfully created port: 9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.686415] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 672.688379] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.253s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.814996] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.970074] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.970444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.007692] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943598, 'name': Rename_Task, 'duration_secs': 0.14452} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.007692] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.007866] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b3b05de-1a4c-44b3-bac6-ab95e1d67f3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.015928] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 673.015928] env[68437]: value = "task-2943600" [ 673.015928] env[68437]: _type = "Task" [ 673.015928] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.025842] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.055053] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943599, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072291} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.055053] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.055053] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5994caae-6b3a-4930-b6c0-816e7d112d75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.070395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.070395] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Instance network_info: |[{"id": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "address": "fa:16:3e:c4:8f:8d", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap095e5fc1-9f", "ovs_interfaceid": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 673.086196] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] df3fbf16-d3d9-4138-b563-6ea09dd233b8/df3fbf16-d3d9-4138-b563-6ea09dd233b8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.086863] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:8f:8d', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '863474bc-a24a-4823-828c-580a187829e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '095e5fc1-9fd6-4b04-b1af-3637ee220d7c', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 673.101069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Creating folder: Project (3d7b3b9e525e494d896b8d6e874c3e8b). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.101487] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b625ff18-fc85-4e4f-bbaa-cd1768babb78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.124419] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5aae814b-d028-4658-84d2-aa05508d08d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.141274] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5284caca-ff3d-33bc-67fa-f224f9873c77, 'name': SearchDatastore_Task, 'duration_secs': 0.008726} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.146408] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.146408] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 180f77ab-e468-410d-8e41-20291487ef5d/180f77ab-e468-410d-8e41-20291487ef5d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.146408] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 673.146408] env[68437]: value = "task-2943602" [ 673.146408] env[68437]: _type = "Task" [ 673.146408] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.146408] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Created folder: Project (3d7b3b9e525e494d896b8d6e874c3e8b) in parent group-v590848. 
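The repeated "Invoking <Manager>.<Method>_Task" / "Waiting for the task" / "Task ... completed successfully" triples in the entries above come from oslo.vmware's invoke-then-poll pattern: the driver calls an asynchronous vSphere *_Task API, gets back a task reference immediately, and then blocks on the session until vCenter reports a terminal state. A minimal sketch of that pattern follows, assuming an oslo.vmware install and reachable vCenter; the host, credentials, datacenter moref and datastore paths are placeholders, not values from this run.

```python
# Hypothetical sketch of the invoke-then-poll pattern visible in the log above.
# Host, credentials, moref value and datastore paths are placeholders.
from oslo_vmware import api, vim_util


def copy_disk_and_wait(session, dc_ref, source_ds_path, dest_ds_path):
    """Invoke CopyVirtualDisk_Task, then block until vCenter finishes it."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    # Corresponds to the "Invoking VirtualDiskManager.CopyVirtualDisk_Task"
    # DEBUG lines: the call returns a task reference without waiting.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_ds_path, sourceDatacenter=dc_ref,
        destName=dest_ds_path, destDatacenter=dc_ref)
    # wait_for_task polls the task and produces the "progress is N%" /
    # "completed successfully" messages seen in the _poll_task entries.
    return session.wait_for_task(task)


if __name__ == '__main__':
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'password',      # placeholders
        api_retry_count=10, task_poll_interval=0.5)
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder
    copy_disk_and_wait(
        session, dc_ref,
        '[datastore1] devstack-image-cache_base/image.vmdk',
        '[datastore1] instance-uuid/instance-uuid.vmdk')
```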
[ 673.146666] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Creating folder: Instances. Parent ref: group-v590861. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 673.146666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.146666] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.146666] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfde2a6d-b6fb-4c54-a77d-9d4726ac7fd7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.148612] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d5ec62f-4247-46c4-aaf6-f6b78fd48668 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.153969] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69993f9c-82af-4472-ba3a-f5335f11e395 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.167797] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 673.167797] env[68437]: value = "task-2943604" [ 673.167797] env[68437]: _type = "Task" [ 673.167797] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.170568] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Created folder: Instances in parent group-v590861. [ 673.170568] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.174326] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 673.174830] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.174830] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.178731] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c73c8983-6d4e-4aaa-8502-6253e38032ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.195022] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41e62628-29bb-476f-92a5-acd5011ab4db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.195744] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.201758] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.204258] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.204258] env[68437]: value = "task-2943605" [ 673.204258] env[68437]: _type = "Task" [ 673.204258] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.205101] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 673.205101] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52512d96-e123-79e4-aba2-3dce9bd30f45" [ 673.205101] env[68437]: _type = "Task" [ 673.205101] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.216273] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943605, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.220645] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52512d96-e123-79e4-aba2-3dce9bd30f45, 'name': SearchDatastore_Task, 'duration_secs': 0.010804} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.221119] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7fd78fe-503a-49f9-9132-d19aed2ce65c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.228432] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 673.228432] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017a9e-f6a1-86ca-ebd1-7b71797caf3f" [ 673.228432] env[68437]: _type = "Task" [ 673.228432] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.237233] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52017a9e-f6a1-86ca-ebd1-7b71797caf3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.324449] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Updated VIF entry in instance network info cache for port 803f314e-2d1c-448a-ae52-ae285d8689cb. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 673.324867] env[68437]: DEBUG nova.network.neutron [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Updating instance_info_cache with network_info: [{"id": "803f314e-2d1c-448a-ae52-ae285d8689cb", "address": "fa:16:3e:0a:02:00", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap803f314e-2d", "ovs_interfaceid": "803f314e-2d1c-448a-ae52-ae285d8689cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.348065] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.348373] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.350546] env[68437]: INFO nova.compute.claims [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.473695] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.534191] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943600, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.661415] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943602, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.680859] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504187} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.681047] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 180f77ab-e468-410d-8e41-20291487ef5d/180f77ab-e468-410d-8e41-20291487ef5d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.681279] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.681542] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4f0ef0a-1886-4641-93ba-c24f3b6106a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.688447] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 673.688447] env[68437]: value = "task-2943606" [ 673.688447] env[68437]: _type = "Task" [ 673.688447] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.697888] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.715589] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943605, 'name': CreateVM_Task, 'duration_secs': 0.503187} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.715589] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.715589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.715589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.715589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.715773] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd9c60c4-fe8c-4c8d-b5d5-f67d8bce03d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.720828] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 673.720828] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5247612b-3538-8353-b828-1ce9173b44bf" [ 673.720828] env[68437]: _type = "Task" [ 673.720828] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.731037] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5247612b-3538-8353-b828-1ce9173b44bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.741081] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52017a9e-f6a1-86ca-ebd1-7b71797caf3f, 'name': SearchDatastore_Task, 'duration_secs': 0.009079} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.741081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.741081] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236/ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.741490] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aaeb69eb-1f06-4d88-b08b-bd72bb969bda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.747856] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 673.747856] env[68437]: value = "task-2943607" [ 673.747856] env[68437]: _type = "Task" [ 673.747856] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.756773] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943607, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.828095] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b548039-e8c4-4fff-ad14-4f6f9753b39d req-aaf2fc37-b2ea-4e48-8dbe-0e549da5a5ce service nova] Releasing lock "refresh_cache-180f77ab-e468-410d-8e41-20291487ef5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.976644] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Successfully updated port: 755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 674.008890] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.041254] env[68437]: DEBUG oslo_vmware.api [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943600, 'name': PowerOnVM_Task, 'duration_secs': 0.557606} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.041400] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.041726] env[68437]: INFO nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Took 13.04 seconds to spawn the instance on the hypervisor. [ 674.041989] env[68437]: DEBUG nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.042926] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d054ac19-e25d-421f-9806-84677c42b166 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.165120] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943602, 'name': ReconfigVM_Task, 'duration_secs': 0.553569} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.168019] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Reconfigured VM instance instance-00000001 to attach disk [datastore1] df3fbf16-d3d9-4138-b563-6ea09dd233b8/df3fbf16-d3d9-4138-b563-6ea09dd233b8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.168019] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c52d1d57-9fc9-4379-b1e6-1c537875c457 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.175504] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 674.175504] env[68437]: value = "task-2943608" [ 674.175504] env[68437]: _type = "Task" [ 674.175504] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.192427] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943608, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.205260] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070293} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.205689] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.207173] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb0764d-2935-4ec7-9937-c37fa8a662e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.232363] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 180f77ab-e468-410d-8e41-20291487ef5d/180f77ab-e468-410d-8e41-20291487ef5d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.235642] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3fe7f7a3-2c4e-4c38-8d15-ea43da7f7567 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.257343] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5247612b-3538-8353-b828-1ce9173b44bf, 'name': SearchDatastore_Task, 'duration_secs': 0.008716} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.261392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.261392] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 674.261392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.261392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.261538] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.261538] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 674.261538] env[68437]: value = "task-2943609" [ 674.261538] env[68437]: _type = "Task" [ 674.261538] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.263080] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f65e6685-1a75-40b1-a3c8-e9130bdff5e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.265341] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464889} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.270150] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236/ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.270150] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.270150] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e40f8c35-5ff0-4e0f-bc72-2e2953d2430f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.278530] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.281675] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 674.281675] env[68437]: value = "task-2943610" [ 674.281675] env[68437]: _type = "Task" [ 674.281675] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.281675] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.281675] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 674.281985] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5d12c72-0f7e-4242-9735-e637a0a44e0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.290595] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 674.290595] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ba7821-3027-6ac4-a27a-5ec0310af408" [ 674.290595] env[68437]: _type = "Task" [ 674.290595] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.294863] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943610, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.301621] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ba7821-3027-6ac4-a27a-5ec0310af408, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.359024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.359356] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.485966] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.486200] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.486397] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 674.567540] env[68437]: INFO nova.compute.manager [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Took 19.31 seconds to build instance. 
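The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and CreateVM_Task entries above all follow the same oslo_vmware pattern: the driver invokes a vSphere task through the shared API session, then wait_for_task polls it, which is what produces the recurring "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming placeholder vCenter credentials and datastore paths rather than values from this run:

# Sketch of the invoke-and-poll pattern behind the task entries above.
# Host, credentials and datastore paths are placeholders, not values from this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
    destName='[datastore1] instance-uuid/instance-uuid.vmdk')

# wait_for_task() polls the task state every task_poll_interval seconds
# (the "progress is N%" lines) and returns the task info once it succeeds,
# raising if the task errors out or is cancelled.
task_info = session.wait_for_task(task)
print(task_info.state)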
[ 674.628617] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b43f66-6674-4956-92bf-3dc6a264bfc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.638611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7dcd26-b1df-45c3-bef3-046e6c12c895 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.673649] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003192ea-09ab-4470-ac91-1723cffae55a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.687434] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943608, 'name': Rename_Task, 'duration_secs': 0.183332} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.687854] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.689290] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288b7ac0-3491-4d00-b800-3bf19af86826 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.696037] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46c6a2dd-c238-4f1a-b595-8ef93eb6a5c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.707727] env[68437]: DEBUG nova.compute.provider_tree [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.710779] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 674.710779] env[68437]: value = "task-2943611" [ 674.710779] env[68437]: _type = "Task" [ 674.710779] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.718601] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943611, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.777145] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943609, 'name': ReconfigVM_Task, 'duration_secs': 0.310897} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.777488] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 180f77ab-e468-410d-8e41-20291487ef5d/180f77ab-e468-410d-8e41-20291487ef5d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.778697] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d9d51db-0e12-480b-a2d1-5a6423f53a29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.786879] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 674.786879] env[68437]: value = "task-2943612" [ 674.786879] env[68437]: _type = "Task" [ 674.786879] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.794072] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098305} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.797449] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.802090] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7265aa-dff2-45ee-9731-0ad4e9a98af2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.804907] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943612, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.811693] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ba7821-3027-6ac4-a27a-5ec0310af408, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.829755] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236/ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 674.830083] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02eb9c04-c225-4ba0-8fe4-955deb6f94fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.833735] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f6acdf5-a8b0-4445-a1be-df391dd3b9ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.854012] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 674.854012] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52412edf-2a5e-a881-5733-60ec4693d8e4" [ 674.854012] env[68437]: _type = "Task" [ 674.854012] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.855503] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 674.855503] env[68437]: value = "task-2943613" [ 674.855503] env[68437]: _type = "Task" [ 674.855503] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.866863] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 674.869860] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943613, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.872986] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52412edf-2a5e-a881-5733-60ec4693d8e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.059799] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 675.071241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fd8fdab1-a076-44a9-aa77-7ba6af4981d0 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.829s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.210765] env[68437]: DEBUG nova.scheduler.client.report [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 675.227291] env[68437]: DEBUG oslo_vmware.api [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943611, 'name': PowerOnVM_Task, 'duration_secs': 0.522075} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.230078] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.230078] env[68437]: INFO nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Took 16.48 seconds to spawn the instance on the hypervisor. 
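The "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" lines around the compute_resources claim come from oslo_concurrency's synchronized wrapper, while the plain Acquiring/Acquired/Releasing lines (image-cache and refresh_cache locks) come from the lock() context manager. A rough sketch of both forms, using an illustrative lock name and function rather than Nova's own code:

# Illustrative sketch of the two lockutils forms seen in these entries.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim():
    # Runs while holding the in-process "compute_resources" lock; the
    # decorator's wrapper logs who acquired it plus wait and hold times.
    pass

# Explicit form: logs the plain Acquiring/Acquired/Releasing messages.
with lockutils.lock('compute_resources'):
    pass

instance_claim()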
[ 675.230078] env[68437]: DEBUG nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.230078] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8131c51-eaca-4b83-8efd-2361602d9b88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.304028] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943612, 'name': Rename_Task, 'duration_secs': 0.168586} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.304028] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 675.304028] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7df88d23-7084-4ec1-a43c-dffbe4e8740b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.308562] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 675.308562] env[68437]: value = "task-2943614" [ 675.308562] env[68437]: _type = "Task" [ 675.308562] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.319146] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943614, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.376893] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52412edf-2a5e-a881-5733-60ec4693d8e4, 'name': SearchDatastore_Task, 'duration_secs': 0.023414} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.377172] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943613, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.379872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.379872] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 45595615-59c0-4c59-b18c-b49a3126dbb7/45595615-59c0-4c59-b18c-b49a3126dbb7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.382606] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51927fa3-d9c2-4c47-be79-c405d08ab3b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.390218] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 675.390218] env[68437]: value = "task-2943615" [ 675.390218] env[68437]: _type = "Task" [ 675.390218] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.393665] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Received event network-vif-plugged-1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 675.393965] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquiring lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.394679] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.395066] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.395685] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 
ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] No waiting events found dispatching network-vif-plugged-1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 675.395795] env[68437]: WARNING nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Received unexpected event network-vif-plugged-1856e5ed-7e1c-4837-a8bc-75086deed489 for instance with vm_state building and task_state spawning. [ 675.396048] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Received event network-changed-1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 675.396301] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Refreshing instance network info cache due to event network-changed-1856e5ed-7e1c-4837-a8bc-75086deed489. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 675.396557] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquiring lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.396891] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquired lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.397255] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Refreshing network info cache for port 1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 675.410179] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.410466] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.592019] env[68437]: DEBUG nova.network.neutron [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.721248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.722126] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 675.725620] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.717s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.727168] env[68437]: INFO nova.compute.claims [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.762096] env[68437]: INFO nova.compute.manager [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Took 21.30 seconds to build instance. [ 675.826872] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.875088] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943613, 'name': ReconfigVM_Task, 'duration_secs': 0.801377} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.875555] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Reconfigured VM instance instance-00000004 to attach disk [datastore1] ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236/ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 675.878480] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f410e96-a485-45a4-b0f9-8a202d5eaa7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.885045] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 675.885045] env[68437]: value = "task-2943616" [ 675.885045] env[68437]: _type = "Task" [ 675.885045] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.896146] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943616, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.908517] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484462} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.909846] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 45595615-59c0-4c59-b18c-b49a3126dbb7/45595615-59c0-4c59-b18c-b49a3126dbb7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.909846] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.909846] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b47b3899-3866-4ac0-adb3-cf1b74631e3b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.916270] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 675.916270] env[68437]: value = "task-2943617" [ 675.916270] env[68437]: _type = "Task" [ 675.916270] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.923785] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943617, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.100359] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.100359] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Instance network_info: |[{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 676.100765] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:b6:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '755ab792-6755-4f3f-8d83-38106672f90b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 676.112528] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating folder: Project (68baf1daffa842b4adb854fe0cec9524). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 676.112857] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46465dc7-a5de-47d0-90e0-d78829674a2a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.126483] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created folder: Project (68baf1daffa842b4adb854fe0cec9524) in parent group-v590848. [ 676.126601] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating folder: Instances. Parent ref: group-v590864. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 676.127419] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcd8abc5-6fb8-47ea-a199-10c197b30174 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.136474] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created folder: Instances in parent group-v590864. [ 676.136726] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 676.136931] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 676.137145] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fe800ff-3a73-4621-ad4a-7ecc98702fd7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.163882] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 676.163882] env[68437]: value = "task-2943620" [ 676.163882] env[68437]: _type = "Task" [ 676.163882] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.175352] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.232414] env[68437]: DEBUG nova.compute.utils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 676.233565] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 676.235794] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 676.264797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a2d07b2-db15-48b9-9aaa-d04c3f19e715 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.324292] env[68437]: DEBUG oslo_vmware.api [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943614, 'name': PowerOnVM_Task, 'duration_secs': 0.814517} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.325254] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.325254] env[68437]: INFO nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Took 13.05 seconds to spawn the instance on the hypervisor. [ 676.325254] env[68437]: DEBUG nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.326175] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cac575a-ee45-4ae1-a12e-111bfba6e392 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.396365] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943616, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.427018] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.480271} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.427018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.427157] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb9624b-83dc-4a11-a3e1-2bccb2bc7e25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.457465] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 45595615-59c0-4c59-b18c-b49a3126dbb7/45595615-59c0-4c59-b18c-b49a3126dbb7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.460336] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7dc6f7a-175b-41dc-ba6e-6099e141d7ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.479500] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Successfully updated port: 9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.479500] env[68437]: DEBUG nova.policy [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17bdd32924094cc9a59a1cb1c27f0c36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4b1c4be5c524504ae9346d2e4ec8008', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 676.486716] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 676.486716] env[68437]: value = "task-2943621" [ 676.486716] env[68437]: _type = "Task" [ 676.486716] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.498387] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943621, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.679575] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.738945] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 676.812659] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Updated VIF entry in instance network info cache for port 1856e5ed-7e1c-4837-a8bc-75086deed489. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 676.812994] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Updating instance_info_cache with network_info: [{"id": "1856e5ed-7e1c-4837-a8bc-75086deed489", "address": "fa:16:3e:1f:f8:91", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.188", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1856e5ed-7e", "ovs_interfaceid": "1856e5ed-7e1c-4837-a8bc-75086deed489", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.843840] env[68437]: INFO nova.compute.manager [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Took 20.68 seconds to build instance. [ 676.898567] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943616, 'name': Rename_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.954406] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d984cd-3348-400f-a1f8-c8101ff0a418 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.962804] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e2ecce-fcfb-4318-abe0-8d13dc2fd3e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.002918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.003135] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquired lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.003293] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 677.009701] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65ea0d6-b983-4d34-8c34-a18b9f8da710 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.021789] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943621, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.023128] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7058cd64-fa90-4ca7-9dad-6fe900c0bbec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.041106] env[68437]: DEBUG nova.compute.provider_tree [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.176044] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.319812] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Releasing lock "refresh_cache-ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 677.320130] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received event network-vif-plugged-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 677.320340] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquiring lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.320544] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.320703] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.320867] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] No waiting events found dispatching network-vif-plugged-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 677.323471] env[68437]: WARNING nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received unexpected event network-vif-plugged-095e5fc1-9fd6-4b04-b1af-3637ee220d7c for instance with vm_state building and task_state spawning. [ 677.323471] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 677.323471] env[68437]: DEBUG nova.compute.manager [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing instance network info cache due to event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 677.323471] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquiring lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.323471] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Acquired lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.323832] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 677.345731] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Successfully created port: 4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.347909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-05bc2c67-f160-4888-9b5c-f8be73876658 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.219s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.404583] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943616, 'name': Rename_Task, 'duration_secs': 1.158458} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.404911] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.406365] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1200563c-caef-47ac-a43f-437e12fd6894 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.415449] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 677.415449] env[68437]: value = "task-2943622" [ 677.415449] env[68437]: _type = "Task" [ 677.415449] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.431505] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.523715] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943621, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.552021] env[68437]: DEBUG nova.scheduler.client.report [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 677.597100] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 677.676799] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.750443] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 677.791672] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 677.791963] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 677.792146] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 677.793500] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 677.793500] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 677.793500] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 677.793500] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 677.793500] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 677.793829] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f 
tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 677.793829] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 677.793829] env[68437]: DEBUG nova.virt.hardware [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 677.794637] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a4c087-ab66-4b50-8508-7b02520ee2dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.803704] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccb7a77-1647-49eb-973f-9ea118b4ddcc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.927787] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943622, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.980411] env[68437]: DEBUG nova.network.neutron [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Updating instance_info_cache with network_info: [{"id": "9f47f745-6f86-4d20-929b-376aeda67a6e", "address": "fa:16:3e:80:61:4f", "network": {"id": "0259266c-be3d-4f63-b92f-a450f922ff76", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1548136929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73180707777547908c86bf1771a04d05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f47f745-6f", "ovs_interfaceid": "9f47f745-6f86-4d20-929b-376aeda67a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.022856] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 
tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943621, 'name': ReconfigVM_Task, 'duration_secs': 1.505048} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.023141] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 45595615-59c0-4c59-b18c-b49a3126dbb7/45595615-59c0-4c59-b18c-b49a3126dbb7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.023958] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bcf7e42-a060-4be9-945d-53cdc6968f7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.031204] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 678.031204] env[68437]: value = "task-2943623" [ 678.031204] env[68437]: _type = "Task" [ 678.031204] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.040162] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943623, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.055741] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.056315] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 678.063155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.651s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.063155] env[68437]: INFO nova.compute.claims [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 678.180491] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.428826] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943622, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.429949] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updated VIF entry in instance network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 678.430360] env[68437]: DEBUG nova.network.neutron [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updating instance_info_cache with network_info: [{"id": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "address": "fa:16:3e:c4:8f:8d", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap095e5fc1-9f", "ovs_interfaceid": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.484655] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Releasing lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.485024] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Instance network_info: |[{"id": "9f47f745-6f86-4d20-929b-376aeda67a6e", "address": "fa:16:3e:80:61:4f", "network": {"id": "0259266c-be3d-4f63-b92f-a450f922ff76", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1548136929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73180707777547908c86bf1771a04d05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f47f745-6f", "ovs_interfaceid": "9f47f745-6f86-4d20-929b-376aeda67a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 678.485795] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:61:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a407774d-9c2a-411d-9d6f-9ca733b97f3f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f47f745-6f86-4d20-929b-376aeda67a6e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.495514] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Creating folder: Project (73180707777547908c86bf1771a04d05). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.495514] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9ae3ea4-3b29-424e-b0b5-5bd7c189edbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.507908] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Created folder: Project (73180707777547908c86bf1771a04d05) in parent group-v590848. [ 678.507908] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Creating folder: Instances. Parent ref: group-v590867. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.507908] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91a89bba-ae4f-4e7f-aa0f-6b0cb8217be0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.518781] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Created folder: Instances in parent group-v590867. [ 678.518781] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.518781] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.518781] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d255540-ada3-42ff-8420-3d105ae153d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.547356] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943623, 'name': Rename_Task, 'duration_secs': 0.169591} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.549252] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.549579] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.549579] env[68437]: value = "task-2943626" [ 678.549579] env[68437]: _type = "Task" [ 678.549579] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.549803] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d64fd91-82ed-43d9-8d39-1a49d9d9064a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.564065] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943626, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.565603] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 678.565603] env[68437]: value = "task-2943627" [ 678.565603] env[68437]: _type = "Task" [ 678.565603] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.567961] env[68437]: DEBUG nova.compute.utils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 678.575022] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 678.575022] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 678.580486] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943627, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.680111] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943620, 'name': CreateVM_Task, 'duration_secs': 2.209968} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.680111] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 678.680893] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.681067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.681387] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 678.681639] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07f8187c-96f0-4e55-a067-a245fd200da9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.687120] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 678.687120] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521f7f43-0472-ff1e-00c6-90698008d79c" [ 678.687120] env[68437]: _type = "Task" [ 678.687120] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.699608] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521f7f43-0472-ff1e-00c6-90698008d79c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.714043] env[68437]: DEBUG nova.policy [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2f5d468eaceb4f86bae00d31b670abb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '518f367d97ad43bb9653dc4a5137e1bb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 678.932352] env[68437]: DEBUG oslo_vmware.api [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943622, 'name': PowerOnVM_Task, 'duration_secs': 1.402558} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.932352] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.932352] env[68437]: INFO nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Took 13.39 seconds to spawn the instance on the hypervisor. 
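Note on the pattern in the records above: every vCenter operation here (CreateVM_Task, Rename_Task, PowerOnVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, SearchDatastore_Task) follows the same lifecycle — a task handle is returned, "Waiting for the task" is logged, progress is polled repeatedly, and the final record carries duration_secs when the task completes successfully. The sketch below is only an illustration of that poll-until-done loop, not the oslo_vmware.api implementation; the poll_task callable, the TaskTimeout exception, and the 'state'/'progress'/'duration_secs' fields are assumptions made up for the example.

# Illustrative sketch (standard library only) of the wait_for_task / _poll_task
# lifecycle visible in the surrounding log records. Hypothetical helper, not oslo_vmware.
import time


class TaskTimeout(Exception):
    """Raised when a task does not report completion within the allowed time (illustrative)."""


def wait_for_task(poll_task, interval=0.5, timeout=300.0):
    """Poll `poll_task()` until it reports completion.

    `poll_task` is assumed to return a dict such as
    {'state': 'running', 'progress': 66} while in flight, or
    {'state': 'success', 'duration_secs': 1.402558} when done.
    """
    start = time.monotonic()
    while True:
        info = poll_task()
        if info['state'] == 'success':
            # Corresponds to the "... completed successfully" records with duration_secs.
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        if time.monotonic() - start > timeout:
            raise TaskTimeout(
                f"task still at {info.get('progress', 0)}% after {timeout}s")
        # Corresponds to the repeated "progress is N%" DEBUG records between polls.
        time.sleep(interval)

In the captured records the successive "progress is N%" polls for a given task id (e.g. task-2943620) arrive roughly half a second apart, consistent with a fixed-interval loop of this shape; the real oslo_vmware loop is driven at a configured poll interval rather than a bare time.sleep, but the observable lifecycle in the log is the same.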
[ 678.932352] env[68437]: DEBUG nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.932352] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14c32b8-3393-4094-a1b3-27eabbd7859e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.939093] env[68437]: DEBUG oslo_concurrency.lockutils [req-5427758d-a9fe-4078-8619-b59853bc4415 req-c0ce13b7-4bcb-4294-9d69-3d72249f3dce service nova] Releasing lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.050917] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "19dde8dd-eae6-41a0-b147-c505db1cda15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.051127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.062360] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943626, 'name': CreateVM_Task, 'duration_secs': 0.371509} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.062934] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.065235] env[68437]: DEBUG oslo_vmware.service [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbc6285-bfa1-4118-bf48-7789fc8e1d47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.078623] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 679.089139] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.089139] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.089139] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.089139] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943627, 'name': PowerOnVM_Task} progress is 37%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.089139] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98f26db4-3770-46ac-98bb-f1d9382d7774 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.095475] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 679.095475] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520e9351-d115-517e-bbf5-c28608cc1249" [ 679.095475] env[68437]: _type = "Task" [ 679.095475] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.107973] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e9351-d115-517e-bbf5-c28608cc1249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.206235] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521f7f43-0472-ff1e-00c6-90698008d79c, 'name': SearchDatastore_Task, 'duration_secs': 0.012377} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.206235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.206235] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.206235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.206545] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.206545] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.206545] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-330d04f0-9a2b-4ebf-9a4e-beda42f35735 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.221022] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.221022] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.221022] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83bead76-6b00-4063-a610-b547b018b1ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.232050] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 679.232050] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527e9201-b91f-a53f-bda6-a325709a292c" [ 679.232050] env[68437]: _type = "Task" [ 679.232050] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.255282] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527e9201-b91f-a53f-bda6-a325709a292c, 'name': SearchDatastore_Task, 'duration_secs': 0.011473} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.255282] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ba305da-a9f2-42e2-b378-f4f7fb22e48d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.259128] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 679.259128] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52baedb3-fb67-25ff-7d73-f576c489fcf2" [ 679.259128] env[68437]: _type = "Task" [ 679.259128] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.272032] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52baedb3-fb67-25ff-7d73-f576c489fcf2, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.272297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.272563] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 27c18765-38cf-41d6-9139-9acffa94fbe6/27c18765-38cf-41d6-9139-9acffa94fbe6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 679.272950] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a09084a-a167-4cae-8fb8-d6abf7db4547 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.280401] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 679.280401] env[68437]: value = "task-2943628" [ 679.280401] env[68437]: _type = "Task" [ 679.280401] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.296698] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943628, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.356093] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fc2507-b789-4558-b38b-b4188f54281f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.364983] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46b45f3-7467-44f3-a9c5-0f16dc7474c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.402035] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5ef324-fea4-435d-8816-01561f1abd53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.410282] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437dbdd9-a8a9-406f-aad6-b462b2b4ce24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.427055] env[68437]: DEBUG nova.compute.provider_tree [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.455777] env[68437]: INFO nova.compute.manager [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Took 22.44 seconds to build instance. [ 679.554350] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.590187] env[68437]: DEBUG oslo_vmware.api [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943627, 'name': PowerOnVM_Task, 'duration_secs': 0.953819} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.590187] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.590187] env[68437]: INFO nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Took 11.78 seconds to spawn the instance on the hypervisor. 
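The Acquiring / Acquired / Releasing lock lines around "[datastore*] devstack-image-cache_base/<image id>" (plus "Acquired external semaphore") are oslo.concurrency's lockutils serializing access to the per-image cache entry: one worker at a time checks the datastore (SearchDatastore_Task) and, if needed, populates the cache, while the others wait. A rough sketch of that pattern follows; the lock_path, the bare-image-id lock name and the fetch_or_reuse_cached_image() helper are hypothetical (the log names the lock after the full datastore cache path), and this is not Nova's actual implementation.

    # Sketch of a per-image cache lock with oslo.concurrency; the lock name,
    # lock_path and the helper below are hypothetical.
    from oslo_concurrency import lockutils

    IMAGE_ID = 'a272f526-6b8d-4a29-bd06-cd29ab5fabbe'

    def fetch_or_reuse_cached_image(image_id):
        """Placeholder for: search the datastore cache, download on a miss."""
        print('checking image cache for %s' % image_id)

    # external=True adds a file lock under lock_path so that separate
    # nova-compute processes sharing the same datastore also serialize;
    # the context manager emits the acquire/release debug lines seen above.
    with lockutils.lock(IMAGE_ID, external=True, lock_path='/tmp/locks'):
        fetch_or_reuse_cached_image(IMAGE_ID)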
[ 679.590365] env[68437]: DEBUG nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.591203] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db703e53-21f1-4b2c-ba29-332d68483700 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.610147] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Successfully created port: cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.619690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.619690] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.619690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.619690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.619987] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.619987] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15c05d24-ff9c-4671-a7ca-2ae8c32d1987 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.631796] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.631956] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.636067] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2518c952-75de-4f52-8793-309a9be71011 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.644808] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93b9f72e-5a51-4f77-9ad7-9081cd4c1df7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.654429] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 679.654429] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526fb380-68b6-20d2-5dff-37c6f69e4ac8" [ 679.654429] env[68437]: _type = "Task" [ 679.654429] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.667688] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526fb380-68b6-20d2-5dff-37c6f69e4ac8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.801499] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943628, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.842851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.843111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.892905] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Successfully updated port: 4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.934145] env[68437]: DEBUG nova.scheduler.client.report [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.959568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3039f9fe-b6f3-43d5-b3e5-120711ae7c41 tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.954s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.090617] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.101102] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 680.119019] env[68437]: INFO nova.compute.manager [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Took 22.03 seconds to build instance. [ 680.134061] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 680.134870] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 680.134870] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 680.134870] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 680.135058] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 680.135320] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 680.135398] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
680.135527] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 680.135792] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 680.135862] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 680.136036] env[68437]: DEBUG nova.virt.hardware [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 680.137251] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b865229-ab29-48df-87c2-b313e7c74e13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.143638] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-vif-plugged-755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 680.143638] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.144182] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.144182] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.144182] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] No waiting events found dispatching network-vif-plugged-755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 680.144863] env[68437]: WARNING nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received unexpected event network-vif-plugged-755ab792-6755-4f3f-8d83-38106672f90b for instance with vm_state building and task_state spawning. [ 680.144863] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-changed-755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 680.144863] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing instance network info cache due to event network-changed-755ab792-6755-4f3f-8d83-38106672f90b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 680.144863] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.145824] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.145824] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing network info cache for port 755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 680.153642] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bfd20b-4bda-4488-bcb1-ee13a870eb3b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.178958] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 680.179385] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Creating directory with path [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.179612] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32858766-edf1-4159-983b-c5d8d8d0e67b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.198016] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Created directory with path [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.198016] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Fetch image to [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 680.198016] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Downloading image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk on the data store datastore2 {{(pid=68437) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 680.198016] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a218fdf9-504e-4a4f-8fec-711385a6a40c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.208249] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece2d7ba-3ca1-4f52-a42b-5aae098997ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.214219] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f050f237-ca07-40c3-a2fb-e4b720f42305 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.246370] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7064ae3-daa3-4772-8c45-3a6c4a43e71d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.253251] env[68437]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-99c94a62-6ad3-4aff-a945-16588b44948a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.284238] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Downloading image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to the data store datastore2 {{(pid=68437) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 680.294230] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541504} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.294522] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 27c18765-38cf-41d6-9139-9acffa94fbe6/27c18765-38cf-41d6-9139-9acffa94fbe6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 680.294694] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 680.294929] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd95d079-acff-41f5-b238-88700a07bc0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.301408] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 680.301408] env[68437]: value = "task-2943629" [ 680.301408] env[68437]: _type = "Task" [ 680.301408] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.313135] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.347184] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.359232] env[68437]: DEBUG oslo_vmware.rw_handles [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 680.423597] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.424229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.424229] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 680.439241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.439579] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 680.442591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.353s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.445678] env[68437]: INFO nova.compute.claims [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.618651] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a79283bf-5b79-4f78-b289-b6f3e38093f2 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.546s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.817114] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079224} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.817684] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 680.818824] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348234a7-9541-40bb-b770-97c5f1129bfe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.846406] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 27c18765-38cf-41d6-9139-9acffa94fbe6/27c18765-38cf-41d6-9139-9acffa94fbe6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 680.852712] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6e4d85c-3689-43ad-9f37-6bb180cf7fc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.877864] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 680.877864] env[68437]: value = "task-2943630" [ 680.877864] env[68437]: _type = "Task" [ 680.877864] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.885865] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943630, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.890526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.950217] env[68437]: DEBUG nova.compute.utils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 680.952900] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 680.953146] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 681.038824] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 681.103800] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.108132] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.135279] env[68437]: DEBUG nova.policy [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1074dd1b444e45beadcccfe6671c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1c3ca0e78f472e8c127fa68ed610f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 681.216264] env[68437]: DEBUG oslo_vmware.rw_handles [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 681.216688] env[68437]: DEBUG oslo_vmware.rw_handles [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 681.323990] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Downloaded image file data a272f526-6b8d-4a29-bd06-cd29ab5fabbe to vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk on the data store datastore2 {{(pid=68437) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 681.325740] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 681.325740] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Copying Virtual Disk [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk to [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.325740] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad5b9041-fb3d-4761-978c-085114f6ca58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.333996] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 681.333996] env[68437]: value = "task-2943631" [ 681.333996] env[68437]: _type = "Task" [ 681.333996] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.341886] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.387834] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943630, 'name': ReconfigVM_Task, 'duration_secs': 0.39021} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.388189] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 27c18765-38cf-41d6-9139-9acffa94fbe6/27c18765-38cf-41d6-9139-9acffa94fbe6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 681.388789] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53aef870-d0d6-48a1-902d-b87194585c95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.395570] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 681.395570] env[68437]: value = "task-2943632" [ 681.395570] env[68437]: _type = "Task" [ 681.395570] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.405233] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943632, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.445017] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updated VIF entry in instance network info cache for port 755ab792-6755-4f3f-8d83-38106672f90b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 681.445017] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.454151] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 681.610934] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.710020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e044ad68-de06-4e93-8e1b-077be399d9d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.719601] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b7a0dc-367d-40af-92ca-06d261e91fcd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.751253] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde0f8f3-b4f2-4f77-bfe4-5c782db4f4ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.759076] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2c498d-292b-42a2-9041-57c3adeb68f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.773035] env[68437]: DEBUG nova.compute.provider_tree [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.843745] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943631, 'name': CopyVirtualDisk_Task} progress is 27%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.900309] env[68437]: DEBUG nova.network.neutron [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.907284] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943632, 'name': Rename_Task, 'duration_secs': 0.26608} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.909677] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 681.909677] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06653445-bc85-4b5a-9eaa-da3ee18d86bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.915630] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 681.915630] env[68437]: value = "task-2943633" [ 681.915630] env[68437]: _type = "Task" [ 681.915630] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.927032] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943633, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.951274] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.951274] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Received event network-vif-plugged-9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.951558] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquiring lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.954021] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.954021] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.954021] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] No waiting events found dispatching network-vif-plugged-9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.954021] env[68437]: WARNING nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Received unexpected event network-vif-plugged-9f47f745-6f86-4d20-929b-376aeda67a6e for instance with vm_state building and task_state spawning. [ 681.954021] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Received event network-changed-9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.954300] env[68437]: DEBUG nova.compute.manager [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Refreshing instance network info cache due to event network-changed-9f47f745-6f86-4d20-929b-376aeda67a6e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 681.954300] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquiring lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.954300] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Acquired lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.954300] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Refreshing network info cache for port 9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 682.048517] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Acquiring lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.048616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.049032] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Acquiring lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.049108] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.049260] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.051675] env[68437]: INFO nova.compute.manager [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa 
tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Terminating instance [ 682.117640] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.117967] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.151811] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.280961] env[68437]: DEBUG nova.scheduler.client.report [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 682.346043] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943631, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.405572] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.405955] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Instance network_info: |[{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.406419] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:76:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4fd952c0-7921-4632-b5de-2fe90c4bba05', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.414481] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Creating folder: Project (e4b1c4be5c524504ae9346d2e4ec8008). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.415162] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1397fef9-dd89-4b2a-92f6-ac422141b630 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.425637] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943633, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.462440] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Created folder: Project (e4b1c4be5c524504ae9346d2e4ec8008) in parent group-v590848. [ 682.463523] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Creating folder: Instances. Parent ref: group-v590870. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.463523] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a10224f-72b6-4151-b1de-92ee022fc24e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.469540] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 682.475443] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Created folder: Instances in parent group-v590870. [ 682.475443] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.475443] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.475443] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b0ddb65-8358-4f16-9e3e-16bef2fc7c27 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.508814] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.508814] env[68437]: value = "task-2943636" [ 682.508814] env[68437]: _type = "Task" [ 682.508814] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.513072] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 682.514334] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 682.514334] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 682.514334] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 682.514490] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 682.514776] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 682.514878] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 682.514993] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 682.515167] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] 
Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 682.515325] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 682.515797] env[68437]: DEBUG nova.virt.hardware [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 682.520781] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f562ec5-1882-4262-894b-014819ed707a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.548320] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943636, 'name': CreateVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.551619] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b780a79d-0e6d-4680-953f-fe3bfc04a476 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.557025] env[68437]: DEBUG nova.compute.manager [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.557165] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.557859] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd876c1a-d7b1-4521-8329-1ce9c144f0e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.566261] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.574339] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c3171b1-3cf6-43a7-bbce-9d9d73f46a61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.581648] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Waiting for the task: (returnval){ [ 682.581648] env[68437]: value = "task-2943637" [ 682.581648] env[68437]: _type = "Task" [ 682.581648] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.590500] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Task: {'id': task-2943637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.624146] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.722867] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Successfully created port: ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.788031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.788031] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.794115] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Successfully updated port: cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.795740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.906s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.797621] env[68437]: INFO nova.compute.claims [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.852704] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943631, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.074525} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.853022] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Copied Virtual Disk [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk to [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.853219] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleting the datastore file [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/tmp-sparse.vmdk {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.853472] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-806d4949-efa5-46a2-9bb6-e45982554700 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.865024] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 682.865024] env[68437]: value = "task-2943638" [ 682.865024] env[68437]: _type = "Task" [ 682.865024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.877710] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943638, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.881090] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "2f368262-0825-4ccc-9b1e-523b705bcfce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.881806] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.926754] env[68437]: DEBUG oslo_vmware.api [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2943633, 'name': PowerOnVM_Task, 'duration_secs': 0.610542} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.927482] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 682.927899] env[68437]: INFO nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Took 12.77 seconds to spawn the instance on the hypervisor. [ 682.928181] env[68437]: DEBUG nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 682.929050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9911bb95-057c-484d-99a3-d7899e14727d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.002118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "180f77ab-e468-410d-8e41-20291487ef5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.003213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.003213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "180f77ab-e468-410d-8e41-20291487ef5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.003213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.003213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.005820] env[68437]: INFO nova.compute.manager [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Terminating instance [ 683.029220] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943636, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.092706] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Task: {'id': task-2943637, 'name': PowerOffVM_Task, 'duration_secs': 0.368525} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.092706] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.092898] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.093064] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bfae19d-3f1d-42b8-aef7-1e5b73d425f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.157650] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.169255] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.169669] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.169669] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Deleting the datastore file [datastore1] 
df3fbf16-d3d9-4138-b563-6ea09dd233b8 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.170122] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be110faa-86ce-41f4-966d-dd0d37a0c03c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.181016] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Waiting for the task: (returnval){ [ 683.181016] env[68437]: value = "task-2943640" [ 683.181016] env[68437]: _type = "Task" [ 683.181016] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.190128] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Task: {'id': task-2943640, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.298306] env[68437]: DEBUG nova.compute.utils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 683.303817] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 683.303817] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 683.307616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.307616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquired lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.307616] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 683.383560] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.059408} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.383827] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.384075] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Moving file from [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588/a272f526-6b8d-4a29-bd06-cd29ab5fabbe to [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe. {{(pid=68437) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 683.384347] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ebf0de76-d506-4d3a-bd94-ae037f30f175 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.386720] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 683.396725] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 683.396725] env[68437]: value = "task-2943641" [ 683.396725] env[68437]: _type = "Task" [ 683.396725] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.409567] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943641, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.457666] env[68437]: INFO nova.compute.manager [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Took 24.38 seconds to build instance. [ 683.516652] env[68437]: DEBUG nova.compute.manager [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 683.517078] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.522920] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efd90a0-f741-4e6f-94c7-2a1a5ccb78f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.544144] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943636, 'name': CreateVM_Task, 'duration_secs': 0.693107} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.544144] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 683.544144] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.544144] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37d46f6c-0811-4a4a-a552-568875094e73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.547165] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.547706] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.548049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.548395] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e39be243-293e-4110-bb1c-0b0125eac340 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.554458] env[68437]: DEBUG nova.compute.manager [None req-4199a91b-141b-48ef-84e8-a4285dddcf0e tempest-ServerDiagnosticsTest-1311827635 tempest-ServerDiagnosticsTest-1311827635-project-admin] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 683.555951] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd84c2db-540a-43fe-b781-eee726b53aa6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.562197] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 683.562197] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c6b20e-4e96-d952-c694-a3bbbe84a254" [ 683.562197] env[68437]: _type = "Task" [ 683.562197] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.562848] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 683.562848] env[68437]: value = "task-2943642" [ 683.562848] env[68437]: _type = "Task" [ 683.562848] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.572904] env[68437]: INFO nova.compute.manager [None req-4199a91b-141b-48ef-84e8-a4285dddcf0e tempest-ServerDiagnosticsTest-1311827635 tempest-ServerDiagnosticsTest-1311827635-project-admin] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Retrieving diagnostics [ 683.574407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd040ec3-0714-4688-93f1-f6aff515d6cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.584230] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c6b20e-4e96-d952-c694-a3bbbe84a254, 'name': SearchDatastore_Task, 'duration_secs': 0.018394} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.584485] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.585990] env[68437]: DEBUG nova.policy [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8a11819f91e486b86a9cc41c1fd7ec5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b002244273f41d89ddf47570ffe6a02', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 683.588097] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.588273] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.589034] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.620989] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Updated VIF entry in instance network info cache for port 9f47f745-6f86-4d20-929b-376aeda67a6e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 683.621446] env[68437]: DEBUG nova.network.neutron [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Updating instance_info_cache with network_info: [{"id": "9f47f745-6f86-4d20-929b-376aeda67a6e", "address": "fa:16:3e:80:61:4f", "network": {"id": "0259266c-be3d-4f63-b92f-a450f922ff76", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1548136929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73180707777547908c86bf1771a04d05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f47f745-6f", "ovs_interfaceid": "9f47f745-6f86-4d20-929b-376aeda67a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.693606] env[68437]: DEBUG oslo_vmware.api [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Task: {'id': task-2943640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280413} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.693877] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.694261] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.694369] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.694455] env[68437]: INFO nova.compute.manager [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Took 1.14 seconds to destroy the instance on the hypervisor. 
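[editorial note] The PowerOffVM_Task / DeleteDatastoreFile_Task records above all trace the same oslo.vmware flow: invoke a VI SDK "*_Task" method through the session, get back a task reference, then poll it until it completes (the "Waiting for the task ..." and "progress is N%" lines emitted by wait_for_task/_poll_task). A minimal sketch of that flow follows; the vCenter host, credentials and the 'vm-12345' moref are placeholders, and the constructor keyword names are assumptions from oslo.vmware's public API — only the call names (invoke_api, PowerOffVM_Task, wait_for_task) mirror what this log shows.

    # Illustrative sketch of the invoke-then-wait task pattern seen above.
    # Endpoint, credentials and the VM moref are placeholders, not log values.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A '*_Task' SDK call returns a task moref immediately instead of blocking.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the task state (the "_poll_task ... progress is N%"
    # lines above) and returns on success or raises if the task reports an error.
    session.wait_for_task(task)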
[ 683.694686] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.694884] env[68437]: DEBUG nova.compute.manager [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.695017] env[68437]: DEBUG nova.network.neutron [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 683.802991] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.908711] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.914119] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943641, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.034568} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.915464] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] File moved {{(pid=68437) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 683.916267] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Cleaning up location [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 683.916267] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleting the datastore file [datastore2] vmware_temp/e86ce5d9-2a54-4fe1-8cca-d8c8b8620588 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.921263] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46014f01-8731-4030-8e77-edac71421fbc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.922944] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.923179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.932504] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 683.932504] env[68437]: value = "task-2943643" [ 683.932504] env[68437]: _type = "Task" [ 683.932504] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.942624] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943643, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.962561] env[68437]: DEBUG oslo_concurrency.lockutils [None req-751c3a63-bec5-44a2-ab37-ee98d784c422 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.892s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.024396] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 684.078396] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943642, 'name': PowerOffVM_Task, 'duration_secs': 0.210317} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.078755] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 684.078937] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 684.079385] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2398143-5297-4d09-b929-e72ddbfc4e93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.125735] env[68437]: DEBUG oslo_concurrency.lockutils [req-93e465a1-2fca-4f86-b036-deecb9b9ea79 req-44a36686-41b4-4e74-a023-360e4e5d505f service nova] Releasing lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.140889] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 684.141121] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 684.141298] env[68437]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Deleting the datastore file [datastore1] 180f77ab-e468-410d-8e41-20291487ef5d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 684.141551] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd281ef0-01fd-4a66-bbef-310bd2a69c3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.147735] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for the task: (returnval){ [ 684.147735] env[68437]: value = "task-2943645" [ 684.147735] env[68437]: _type = "Task" [ 684.147735] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.149393] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01074ce1-df92-4c87-baad-af9922a5bab0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.162482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21723b79-808a-4a53-bac6-b1736695952f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.165664] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.200063] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f43d6b5-5c4c-4043-95a1-149c1ef54795 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.210868] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e922b3-412a-45e5-ae65-d193fbeb842e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.227040] env[68437]: DEBUG nova.compute.provider_tree [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.451993] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06979} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.451993] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.451993] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e82c4f2-ab98-4df9-a382-79e5a4b28b8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.456997] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 684.456997] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52157ca4-23a4-6d05-ea70-8bcecd9fc11c" [ 684.456997] env[68437]: _type = "Task" [ 684.456997] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.465987] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 684.470207] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52157ca4-23a4-6d05-ea70-8bcecd9fc11c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.662330] env[68437]: DEBUG oslo_vmware.api [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Task: {'id': task-2943645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213288} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.663063] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.663117] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 684.663421] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 684.663503] env[68437]: INFO nova.compute.manager [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 684.663864] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 684.664092] env[68437]: DEBUG nova.compute.manager [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 684.664185] env[68437]: DEBUG nova.network.neutron [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 684.735349] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.737244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.737244] env[68437]: DEBUG nova.scheduler.client.report [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 684.773465] env[68437]: DEBUG nova.network.neutron [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating instance_info_cache with network_info: [{"id": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "address": "fa:16:3e:72:d6:46", "network": {"id": "06eb8d2b-d1b9-4dee-ada5-4dbf2f3a3f1e", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1510705003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "518f367d97ad43bb9653dc4a5137e1bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc16d3e-2e", "ovs_interfaceid": 
"cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.813255] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.846366] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.846645] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.846716] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.847548] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.847753] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.847892] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.848136] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 
tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.848301] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.848941] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.848941] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.848941] env[68437]: DEBUG nova.virt.hardware [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.849795] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfba7253-4f34-47ea-8fd7-428395ec05f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.859607] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bbba04b-4641-491d-aeb8-8b4cb05dfecf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.954874] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Successfully created port: 9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.969631] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52157ca4-23a4-6d05-ea70-8bcecd9fc11c, 'name': SearchDatastore_Task, 'duration_secs': 0.016183} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.969971] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.970346] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658/ce8fd88b-249b-4fee-80fc-35b795d24658.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.970724] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.970977] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.973286] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b69de7b-fd87-4a01-b7ef-0b6400cebce2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.978893] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f676f128-504f-44f9-9749-2cb472985bea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.989213] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 684.989213] env[68437]: value = "task-2943646" [ 684.989213] env[68437]: _type = "Task" [ 684.989213] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.994596] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.994909] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.999868] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ab4013-7a2b-4189-b3ea-f86a329aa05c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.006164] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.012718] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943646, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.013698] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 685.013698] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52eaef05-8ac9-cb0d-1f67-4db8a67e858c" [ 685.013698] env[68437]: _type = "Task" [ 685.013698] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.023081] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52eaef05-8ac9-cb0d-1f67-4db8a67e858c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.245840] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.245840] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.250135] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.098s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.253885] env[68437]: INFO nova.compute.claims [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.274603] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Releasing lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.274961] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Instance network_info: |[{"id": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "address": "fa:16:3e:72:d6:46", "network": {"id": "06eb8d2b-d1b9-4dee-ada5-4dbf2f3a3f1e", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1510705003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "518f367d97ad43bb9653dc4a5137e1bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc16d3e-2e", "ovs_interfaceid": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.277043] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:d6:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.288089] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a 
tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Creating folder: Project (518f367d97ad43bb9653dc4a5137e1bb). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.288436] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0dd6406e-1cac-42b7-974a-ade415090336 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.301969] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Created folder: Project (518f367d97ad43bb9653dc4a5137e1bb) in parent group-v590848. [ 685.302204] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Creating folder: Instances. Parent ref: group-v590873. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.302508] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-059ca146-9ad9-4928-a7bc-f79cd80fdff4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.317212] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Created folder: Instances in parent group-v590873. [ 685.317368] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.318507] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.318507] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0924ed1c-9be1-492a-937f-e4e338a68cd2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.344678] env[68437]: DEBUG nova.network.neutron [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.346048] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.346048] env[68437]: value = "task-2943649" [ 685.346048] env[68437]: _type = "Task" [ 685.346048] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.356926] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943649, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.512256] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943646, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513586} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.512256] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658/ce8fd88b-249b-4fee-80fc-35b795d24658.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.512256] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.512256] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c6fc4f2-2821-4b31-92c7-bcd03f670bc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.529138] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52eaef05-8ac9-cb0d-1f67-4db8a67e858c, 'name': SearchDatastore_Task, 'duration_secs': 0.010272} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.531270] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 685.531270] env[68437]: value = "task-2943650" [ 685.531270] env[68437]: _type = "Task" [ 685.531270] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.532977] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df78d14-76bf-426b-afce-f4b4c3cc10c0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.540903] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 685.540903] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a6e510-3664-5314-a4f3-ef8ae71b25d2" [ 685.540903] env[68437]: _type = "Task" [ 685.540903] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.544336] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943650, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.552419] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a6e510-3664-5314-a4f3-ef8ae71b25d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.752375] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.752669] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.767890] env[68437]: DEBUG nova.compute.utils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 685.774125] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 685.774125] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 685.851109] env[68437]: INFO nova.compute.manager [-] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Took 2.16 seconds to deallocate network for instance. [ 685.872207] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943649, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.963081] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Received event network-vif-plugged-4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 685.963438] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.965343] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.965612] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.965954] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] No waiting events found dispatching network-vif-plugged-4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 685.965954] env[68437]: WARNING nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Received unexpected event network-vif-plugged-4fd952c0-7921-4632-b5de-2fe90c4bba05 for instance with vm_state building and task_state spawning. [ 685.966057] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Received event network-changed-4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 685.966186] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Refreshing instance network info cache due to event network-changed-4fd952c0-7921-4632-b5de-2fe90c4bba05. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 685.967103] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Acquiring lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.967103] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Acquired lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.967103] env[68437]: DEBUG nova.network.neutron [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Refreshing network info cache for port 4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 685.971047] env[68437]: DEBUG nova.policy [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '859c80d9e1eb4badb370c62ccccb90b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df4cce8aeb924a148b0dfcc56745f8f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.054465] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206433} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.065101] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.066035] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab371e98-e392-43b7-aa56-2dc886ee562f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.076693] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a6e510-3664-5314-a4f3-ef8ae71b25d2, 'name': SearchDatastore_Task, 'duration_secs': 0.032618} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.090024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.090627] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 686.099770] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658/ce8fd88b-249b-4fee-80fc-35b795d24658.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.100212] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea74fac1-ae76-4e9f-b9eb-9bb0f200af10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.102589] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b020e6a-2c45-4c0c-ad5e-8e46777c47c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.129686] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 686.129686] env[68437]: value = "task-2943651" [ 686.129686] env[68437]: _type = "Task" [ 686.129686] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.133415] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 686.133415] env[68437]: value = "task-2943652" [ 686.133415] env[68437]: _type = "Task" [ 686.133415] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.145222] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943651, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.148795] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943652, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.279148] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.361605] env[68437]: DEBUG nova.network.neutron [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.363188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.370361] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943649, 'name': CreateVM_Task, 'duration_secs': 0.701716} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.370361] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.370867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.371057] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.372454] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 686.372454] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b2f9967-edd5-4b3a-ad6d-42235a170025 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.378717] env[68437]: DEBUG oslo_vmware.api 
[None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 686.378717] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5275ff58-5e4f-e732-1b56-d18bd3454b3c" [ 686.378717] env[68437]: _type = "Task" [ 686.378717] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.393075] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5275ff58-5e4f-e732-1b56-d18bd3454b3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.648816] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.653469] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943652, 'name': ReconfigVM_Task, 'duration_secs': 0.455012} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.653784] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Reconfigured VM instance instance-00000007 to attach disk [datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658/ce8fd88b-249b-4fee-80fc-35b795d24658.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.655314] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2d3f71e-83ad-473c-a59b-1e92769cad59 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.657407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada17588-d223-42a3-857d-15a4bf6eb803 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.667370] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2893dce9-6cd2-44bf-85ab-429bd9a0511b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.670537] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 686.670537] env[68437]: value = "task-2943653" [ 686.670537] env[68437]: _type = "Task" [ 686.670537] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.701650] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Successfully updated port: ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.706987] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0563f567-ecca-4a27-af37-269d1dbed8ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.713049] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943653, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.718872] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda163a7-886d-4cf0-abb4-cc652685607d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.735833] env[68437]: DEBUG nova.compute.provider_tree [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.865708] env[68437]: INFO nova.compute.manager [-] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Took 2.20 seconds to deallocate network for instance. [ 686.891865] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5275ff58-5e4f-e732-1b56-d18bd3454b3c, 'name': SearchDatastore_Task, 'duration_secs': 0.023259} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.892214] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.892479] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.892718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.892865] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.893070] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.893346] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fbb096a-3c0d-4eda-b0d8-0b527c1a1a56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.897771] env[68437]: DEBUG nova.network.neutron [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updated VIF entry in instance network info cache for port 4fd952c0-7921-4632-b5de-2fe90c4bba05. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 686.898191] env[68437]: DEBUG nova.network.neutron [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.902392] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.902392] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.905657] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d83e165e-d1bc-4dcc-975f-accc4a5c5aad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.909258] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 686.909258] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525efdda-473e-dc41-f1df-7116fda05565" [ 686.909258] env[68437]: _type = "Task" [ 686.909258] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.923512] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525efdda-473e-dc41-f1df-7116fda05565, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.084764] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "d5db3112-88c7-43af-a434-b91ca69f8559" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.085618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.146129] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943651, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.190934] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943653, 'name': Rename_Task, 'duration_secs': 0.139456} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.191974] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.192107] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fb61801-f07d-4e00-8c1f-9c500431dd32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.202224] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 687.202224] env[68437]: value = "task-2943654" [ 687.202224] env[68437]: _type = "Task" [ 687.202224] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.221936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.221936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.221936] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 687.225028] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943654, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.241783] env[68437]: DEBUG nova.scheduler.client.report [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 687.295762] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Successfully created port: bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.300635] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.332734] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.332734] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.332734] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.332969] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.332969] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.333604] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.333604] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.333745] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.333902] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 
tempest-ServerTagsTestJSON-442522135-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.334079] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.334251] env[68437]: DEBUG nova.virt.hardware [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.335178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a128e5-e2b6-47d4-a1b2-8c93349dc5ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.344096] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f4419f-f1a5-4a0d-a1d1-1ddc34422ba5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.376270] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.401120] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Releasing lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.401350] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Received event network-vif-plugged-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.401530] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.401739] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.401905] env[68437]: DEBUG oslo_concurrency.lockutils [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.402081] env[68437]: DEBUG nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] No waiting events found dispatching network-vif-plugged-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.402254] env[68437]: WARNING nova.compute.manager [req-cb4e0dca-5186-4602-b368-15ed92ead6be req-b44af0e6-2484-4b7c-a245-d75c04d5a471 service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Received unexpected event network-vif-plugged-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 for instance with vm_state building and task_state spawning. [ 687.420171] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525efdda-473e-dc41-f1df-7116fda05565, 'name': SearchDatastore_Task, 'duration_secs': 0.014032} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.420171] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386ead85-23a6-4466-9397-92280ab646d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.426413] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 687.426413] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f59d8f-82fb-1adb-73da-19f2d0128744" [ 687.426413] env[68437]: _type = "Task" [ 687.426413] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.437573] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f59d8f-82fb-1adb-73da-19f2d0128744, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.459245] env[68437]: DEBUG nova.compute.manager [req-dbf62a79-2359-477d-adac-4f4fc78b6dee req-69dcf4c1-3528-43b3-a481-9fbeac488011 service nova] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Received event network-vif-deleted-803f314e-2d1c-448a-ae52-ae285d8689cb {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 687.490019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.490304] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.490592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.490838] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.491085] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.494129] env[68437]: INFO nova.compute.manager [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Terminating instance [ 687.656344] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943651, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.168481} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.656806] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 687.656806] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 687.657018] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa782571-a427-4a58-b7c3-c4a45addae28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.664817] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 687.664817] env[68437]: value = "task-2943655" [ 687.664817] env[68437]: _type = "Task" [ 687.664817] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.675067] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.718334] env[68437]: DEBUG oslo_vmware.api [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943654, 'name': PowerOnVM_Task, 'duration_secs': 0.504383} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.718820] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.719329] env[68437]: INFO nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Took 15.26 seconds to spawn the instance on the hypervisor. 
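The recurring "Waiting for the task ... to complete", "progress is N%" and "Task: {...} completed successfully" entries above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all come from oslo.vmware's task handling: the driver submits an asynchronous vCenter task and then blocks in VMwareAPISession.wait_for_task, which re-reads the task state on a fixed interval until it reaches success or raises on error. Below is a minimal, self-contained sketch of that submit-then-poll pattern; it is not Nova's own helper code, and the vCenter endpoint, credentials and VM managed object reference are placeholders rather than values taken from this log.

    # Sketch of the submit-then-poll pattern behind the "Waiting for the task ..."
    # / "progress is N%" / "completed successfully" log lines above.
    # Endpoint, credentials and the VM moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        host='vcenter.example.org',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5,   # seconds between task-state polls
    )

    # Build a managed object reference for an existing VM (placeholder id).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Submit the asynchronous operation; vCenter returns a task reference
    # immediately instead of blocking until the power-on finishes.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task reaches 'success' (or raise on 'error'); each poll
    # iteration is what surfaces as a "progress is N%" DEBUG line.
    session.wait_for_task(task)

In this log the same pattern repeats per operation; each "Task: {'id': task-..., 'name': ...} completed successfully" line marks one such wait_for_task call returning before the driver moves on to the next spawn step.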
[ 687.719637] env[68437]: DEBUG nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.721308] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453e4d27-72f1-4cf6-931f-78703f9539ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.751984] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.751984] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 687.757460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.600s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.758696] env[68437]: INFO nova.compute.claims [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.905486] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Successfully updated port: 9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.941120] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f59d8f-82fb-1adb-73da-19f2d0128744, 'name': SearchDatastore_Task, 'duration_secs': 0.011143} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.941444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.941666] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ea330078-a8f2-41f4-a161-5d0e29ddfab5/ea330078-a8f2-41f4-a161-5d0e29ddfab5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.941937] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4f89d18-8b12-4a9c-95ef-0e1840b8fa25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.949360] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 687.949360] env[68437]: value = "task-2943656" [ 687.949360] env[68437]: _type = "Task" [ 687.949360] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.961198] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.003135] env[68437]: DEBUG nova.compute.manager [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 688.003135] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.003135] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7781c5f7-e68f-4b09-9432-8423beb19c98 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.010671] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 688.011118] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9536f6b7-08b1-41e8-b863-985acf2f1c4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.017821] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 688.017821] env[68437]: value = "task-2943657" [ 688.017821] env[68437]: _type = "Task" [ 688.017821] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.030947] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943657, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.082786] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 688.176628] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097044} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.176941] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 688.178078] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d634f482-e567-42a5-970f-653b99880184 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.204546] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.204923] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2298f031-4352-4546-8928-0938c2300433 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.229396] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 688.229396] env[68437]: value = "task-2943658" [ 688.229396] env[68437]: _type = "Task" [ 688.229396] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.244211] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943658, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.250787] env[68437]: INFO nova.compute.manager [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Took 25.45 seconds to build instance. [ 688.267544] env[68437]: DEBUG nova.compute.utils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 688.271467] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 688.271691] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 688.338111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.338111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.355395] env[68437]: DEBUG nova.policy [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05cecb8a2d8e4fec876c54e984282267', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '025967d11eca4b2fb49264397076e046', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 688.410510] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.410741] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.411074] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 688.462358] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943656, 'name': CopyVirtualDisk_Task} 
progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.529073] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943657, 'name': PowerOffVM_Task, 'duration_secs': 0.312847} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.531684] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.531875] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.532161] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b7be6b6-789e-43de-9f4a-a2647a5022a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.655478] env[68437]: DEBUG nova.network.neutron [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Updating instance_info_cache with network_info: [{"id": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "address": "fa:16:3e:52:05:c8", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac2dc22a-b9", "ovs_interfaceid": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.743350] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943658, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.756494] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20aef41f-06fd-4860-99ed-438f09a2348d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.960s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.775319] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 688.859262] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Successfully created port: 9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.961699] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577861} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.964447] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ea330078-a8f2-41f4-a161-5d0e29ddfab5/ea330078-a8f2-41f4-a161-5d0e29ddfab5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 688.964674] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 688.965496] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd29aa70-c687-4d35-89e1-41439b06ad5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.976149] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 688.976149] env[68437]: value = "task-2943660" [ 688.976149] env[68437]: _type = "Task" [ 688.976149] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.989747] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.015160] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 689.155190] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f25956f-eb4e-46b5-b437-bf86b5e1cc8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.158420] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.158705] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance network_info: |[{"id": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "address": "fa:16:3e:52:05:c8", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac2dc22a-b9", "ovs_interfaceid": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.159108] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:05:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac2dc22a-b9e3-4855-81b4-4f26c019fc72', 
'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.167280] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating folder: Project (3f1c3ca0e78f472e8c127fa68ed610f5). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.168040] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdfef96e-4ae9-4a4c-9af5-5181333e0fab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.180526] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721d0f51-4a39-4635-87ad-79579664c162 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.188690] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created folder: Project (3f1c3ca0e78f472e8c127fa68ed610f5) in parent group-v590848. [ 689.188882] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating folder: Instances. Parent ref: group-v590876. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.189675] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ddf66b2e-b37e-4845-8b76-c0318d9c2e88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.251918] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f220bdad-7550-455d-b95f-da5d34f08db4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.258958] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created folder: Instances in parent group-v590876. [ 689.258958] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.259521] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.259926] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c222aa1-6111-47be-8119-01557a9db73d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.298835] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.311251] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943658, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.316474] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfcbdf0-ba87-4a58-bec2-f57ce670bc41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.323381] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.323381] env[68437]: value = "task-2943663" [ 689.323381] env[68437]: _type = "Task" [ 689.323381] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.343508] env[68437]: DEBUG nova.compute.provider_tree [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.357967] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943663, 'name': CreateVM_Task} progress is 15%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.488646] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067893} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.488646] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 689.489092] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391d7d0f-9543-41b6-9eb0-e3aa45be37ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.516155] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] ea330078-a8f2-41f4-a161-5d0e29ddfab5/ea330078-a8f2-41f4-a161-5d0e29ddfab5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 689.517305] env[68437]: DEBUG nova.network.neutron [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [{"id": "9fbfd56e-861b-488a-afc9-9efe25097c73", "address": "fa:16:3e:be:cf:7a", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbfd56e-86", "ovs_interfaceid": "9fbfd56e-861b-488a-afc9-9efe25097c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.519392] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4fd917d-f782-48ab-b93d-29bfa58e6bc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.542735] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 689.542735] env[68437]: value = "task-2943664" [ 689.542735] env[68437]: _type = "Task" [ 689.542735] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.554041] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943664, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.626367] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Successfully updated port: bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.752921] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943658, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.812853] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 689.833795] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943663, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.840025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.843994] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 689.843994] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Flavor limits 0:0:0 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.843994] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.844217] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.844217] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.844217] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 689.844217] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 689.844217] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 689.844389] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 689.844389] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 689.844552] env[68437]: DEBUG nova.virt.hardware [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 689.845376] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f391ced-dd40-44ef-a490-830260ed7712 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.852744] env[68437]: DEBUG nova.scheduler.client.report [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 
tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.857265] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f586bd-b100-47ef-a907-f4cbfb045660 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.982293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "d7c64aa1-44f8-44f4-9fb6-463033837736" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.982842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.019938] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.020293] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Instance network_info: |[{"id": "9fbfd56e-861b-488a-afc9-9efe25097c73", "address": "fa:16:3e:be:cf:7a", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbfd56e-86", "ovs_interfaceid": "9fbfd56e-861b-488a-afc9-9efe25097c73", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 690.020735] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:cf:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9fbfd56e-861b-488a-afc9-9efe25097c73', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.032331] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating folder: Project (5b002244273f41d89ddf47570ffe6a02). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.032331] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33c1a375-3fc8-4166-8306-818f8f1601b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.043023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 690.043023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 690.043023] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Deleting the datastore file [datastore1] ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 690.043023] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-480cc202-a442-4176-89f3-9f2b164d2efb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.047719] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created folder: Project (5b002244273f41d89ddf47570ffe6a02) in parent group-v590848. [ 690.048104] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating folder: Instances. Parent ref: group-v590879. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.052276] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3dbe163-766f-4ce2-8bff-64ac04645eb1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.056328] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for the task: (returnval){ [ 690.056328] env[68437]: value = "task-2943666" [ 690.056328] env[68437]: _type = "Task" [ 690.056328] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.059676] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.064335] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created folder: Instances in parent group-v590879. [ 690.064571] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.065215] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.069308] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d708aa90-0446-45be-99d1-c0dffbb25598 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.089312] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943666, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.094267] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.094267] env[68437]: value = "task-2943668" [ 690.094267] env[68437]: _type = "Task" [ 690.094267] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.104140] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943668, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.129601] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.130126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquired lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.130555] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 690.252104] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943658, 'name': ReconfigVM_Task, 'duration_secs': 1.536236} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.252487] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.253073] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a2c0b276-5bc6-4386-96fe-86b8d5c9d435 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.262487] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 690.262487] env[68437]: value = "task-2943669" [ 690.262487] env[68437]: _type = "Task" [ 690.262487] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.272583] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943669, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.338238] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943663, 'name': CreateVM_Task, 'duration_secs': 0.947573} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.338490] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.339460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.339661] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.339997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 690.340321] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9856a483-72f0-47c7-bca2-d1f1f91daecd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.346485] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 690.346485] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5243a42a-ac58-3f29-3907-d71d720da5cc" [ 690.346485] env[68437]: _type = "Task" [ 690.346485] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.357083] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5243a42a-ac58-3f29-3907-d71d720da5cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.364492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.364915] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 690.368975] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.460s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.370925] env[68437]: INFO nova.compute.claims [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.561587] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943664, 'name': ReconfigVM_Task, 'duration_secs': 0.616514} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.566254] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfigured VM instance instance-00000009 to attach disk [datastore2] ea330078-a8f2-41f4-a161-5d0e29ddfab5/ea330078-a8f2-41f4-a161-5d0e29ddfab5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.566254] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-162ebe6f-9084-4e31-87b6-e7fc4d91d4cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.572040] env[68437]: DEBUG oslo_vmware.api [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Task: {'id': task-2943666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16824} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.573633] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 690.573633] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 690.573774] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 690.573970] env[68437]: INFO nova.compute.manager [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Took 2.57 seconds to destroy the instance on the hypervisor. [ 690.574263] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.576872] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 690.576872] env[68437]: value = "task-2943670" [ 690.576872] env[68437]: _type = "Task" [ 690.576872] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.576872] env[68437]: DEBUG nova.compute.manager [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 690.576872] env[68437]: DEBUG nova.network.neutron [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 690.585252] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943670, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.598700] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Received event network-changed-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 690.598700] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Refreshing instance network info cache due to event network-changed-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 690.598700] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquiring lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.598895] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquired lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.598985] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Refreshing network info cache for port cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 690.610163] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943668, 'name': CreateVM_Task, 'duration_secs': 0.363787} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.610798] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.611319] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.701215] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 690.779362] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943669, 'name': Rename_Task, 'duration_secs': 0.193602} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.779624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.780069] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8b8b155-ebf2-47d0-bd42-3659a730d93b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.787146] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 690.787146] env[68437]: value = "task-2943671" [ 690.787146] env[68437]: _type = "Task" [ 690.787146] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.795272] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.857815] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5243a42a-ac58-3f29-3907-d71d720da5cc, 'name': SearchDatastore_Task, 'duration_secs': 0.010031} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.858151] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.858391] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.858630] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.858777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.858957] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.859260] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.859567] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 690.859837] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b09b79d-306a-4eff-bddc-4ab1f0a36b7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.862425] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c47080-6fa1-49de-af3f-172acc07fb05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.873109] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 
tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 690.873109] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5256e83b-0c30-718c-cad1-647db5b13d3c" [ 690.873109] env[68437]: _type = "Task" [ 690.873109] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.873109] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.873109] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.876414] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dc9faa2-6969-4d9d-b273-f40f86ca38d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.879543] env[68437]: DEBUG nova.compute.utils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 690.884771] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 690.884771] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 690.891335] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 690.891335] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7f3f1-b62d-b08f-1c7a-b95268fe4857" [ 690.891335] env[68437]: _type = "Task" [ 690.891335] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.891548] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5256e83b-0c30-718c-cad1-647db5b13d3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.905134] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7f3f1-b62d-b08f-1c7a-b95268fe4857, 'name': SearchDatastore_Task, 'duration_secs': 0.013942} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.906028] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01103d08-081a-4da0-8b81-ba1a65ead67d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.911447] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 690.911447] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525364c9-36e3-9923-b5de-2018facc9e52" [ 690.911447] env[68437]: _type = "Task" [ 690.911447] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.919754] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525364c9-36e3-9923-b5de-2018facc9e52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.968671] env[68437]: DEBUG nova.network.neutron [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Updating instance_info_cache with network_info: [{"id": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "address": "fa:16:3e:3a:c4:09", "network": {"id": "0e753c56-c852-47cd-beff-ae9a78e80fae", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1790064924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "df4cce8aeb924a148b0dfcc56745f8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd37d751-cb", "ovs_interfaceid": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.070211] env[68437]: DEBUG nova.policy [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': '7536b751d42c4b5889c055d32268f93c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '06bbda421d194770bfc1b9624522a665', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 691.087030] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943670, 'name': Rename_Task, 'duration_secs': 0.140676} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.087301] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 691.087567] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3aad5c3d-4dca-45bf-be69-35a915390d44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.093938] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 691.093938] env[68437]: value = "task-2943672" [ 691.093938] env[68437]: _type = "Task" [ 691.093938] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.101487] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943672, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.298899] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943671, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.388330] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 691.391508] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5256e83b-0c30-718c-cad1-647db5b13d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.016883} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.391792] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.392029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 691.392244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.425460] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525364c9-36e3-9923-b5de-2018facc9e52, 'name': SearchDatastore_Task, 'duration_secs': 0.015868} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.428554] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.428845] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 5abc2c5a-2177-4d77-97ce-872808bb47ee/5abc2c5a-2177-4d77-97ce-872808bb47ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.429620] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.429819] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 691.430056] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb2965b1-7fdb-48da-9385-8a7443248a1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.435031] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fc8a3d0-1387-4b42-9f89-240522cecbf6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.444934] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 691.444934] env[68437]: value = "task-2943673" [ 691.444934] env[68437]: _type = "Task" [ 691.444934] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.446942] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 691.447234] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 691.451032] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7925129-2840-4e5c-a469-973c8ca6a9ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.458443] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.459731] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 691.459731] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f7c4c9-65b5-3cfd-767d-cceb7daa216f" [ 691.459731] env[68437]: _type = "Task" [ 691.459731] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.476781] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Releasing lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.477142] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Instance network_info: |[{"id": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "address": "fa:16:3e:3a:c4:09", "network": {"id": "0e753c56-c852-47cd-beff-ae9a78e80fae", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1790064924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "df4cce8aeb924a148b0dfcc56745f8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd37d751-cb", "ovs_interfaceid": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 691.477435] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f7c4c9-65b5-3cfd-767d-cceb7daa216f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.477775] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:c4:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e350f83a-f581-4e10-ac16-0b0f7bfd3d38', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd37d751-cb4d-4517-b9be-bfa192d0fff9', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.490515] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Creating folder: Project (df4cce8aeb924a148b0dfcc56745f8f3). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.494654] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-614a39c1-b124-4c85-9439-6b4adb679a4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.504951] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Created folder: Project (df4cce8aeb924a148b0dfcc56745f8f3) in parent group-v590848. [ 691.505599] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Creating folder: Instances. Parent ref: group-v590882. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.505599] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4a7d229-1ee3-4606-92d7-5e8081bba8e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.519228] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Created folder: Instances in parent group-v590882. [ 691.519228] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 691.519228] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 691.519228] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-782dcb8a-fb0b-4f66-951f-acac9f533462 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.540175] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Successfully updated port: 9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.549443] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.549443] env[68437]: value = "task-2943676" [ 691.549443] env[68437]: _type = "Task" [ 691.549443] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.562315] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943676, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.609966] env[68437]: DEBUG oslo_vmware.api [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2943672, 'name': PowerOnVM_Task, 'duration_secs': 0.450867} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.609966] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.609966] env[68437]: INFO nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Took 11.51 seconds to spawn the instance on the hypervisor. [ 691.609966] env[68437]: DEBUG nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.610852] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d808d292-0148-45ba-af4d-91101611cebc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.714670] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Successfully created port: ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.799642] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a9d962-c58e-4199-a6d5-7685af49d898 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.808736] env[68437]: DEBUG oslo_vmware.api [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943671, 'name': PowerOnVM_Task, 'duration_secs': 0.523267} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.811087] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.811942] env[68437]: INFO nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Took 14.06 seconds to spawn the instance on the hypervisor. 
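The entries above keep repeating the oslo.vmware task pattern: a vCenter *_Task is submitted, the caller logs "Waiting for the task ... to complete", _poll_task reports progress (0% ... 100%), and the task finally shows "completed successfully" with a duration_secs. Below is a minimal, self-contained Python sketch of that poll-until-done loop; get_task_info is a hypothetical stub standing in for the Task.info lookup that oslo.vmware performs against vCenter, not the library's real API.

import time

# Hypothetical stub: in the real driver oslo.vmware reads Task.info from
# vCenter; here we simulate a task that reaches 100% after a few polls.
_PROGRESS = {"task-2943672": iter([0, 50, 100])}

def get_task_info(task_ref):
    progress = next(_PROGRESS[task_ref], 100)
    state = "success" if progress == 100 else "running"
    return {"state": state, "progress": progress, "error": None}

def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a task until it succeeds or errors, mirroring the log pattern."""
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"{task_ref} failed: {info['error']}")
        # Comparable to the "progress is N%" lines emitted by _poll_task.
        print(f"Task {task_ref} progress is {info['progress']}%")
        time.sleep(poll_interval)

if __name__ == "__main__":
    result = wait_for_task("task-2943672")
    print(f"Task completed successfully: {result}")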
[ 691.812163] env[68437]: DEBUG nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.813468] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updated VIF entry in instance network info cache for port cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 691.813949] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating instance_info_cache with network_info: [{"id": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "address": "fa:16:3e:72:d6:46", "network": {"id": "06eb8d2b-d1b9-4dee-ada5-4dbf2f3a3f1e", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1510705003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "518f367d97ad43bb9653dc4a5137e1bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc16d3e-2e", "ovs_interfaceid": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.816434] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977dc0a2-4cfb-47fd-9729-c38cb9e914e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.820806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30ac27a-c7e3-48cd-b28b-ab8adc4a25b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.861172] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae2385f-0e51-45d0-9d20-48e4442d4fea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.876602] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263b86cb-991f-4793-a818-7594ab7251cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.899473] env[68437]: DEBUG nova.compute.provider_tree [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not 
changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.916221] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 691.916488] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing instance network info cache due to event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 691.916699] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquiring lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.917105] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquired lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.917244] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 691.960374] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943673, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.982041] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f7c4c9-65b5-3cfd-767d-cceb7daa216f, 'name': SearchDatastore_Task, 'duration_secs': 0.035198} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.982041] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a89369d2-9ca8-4b87-93f8-dd39f6a56c28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.991218] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 691.991218] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d121f3-8719-caa4-4458-5e651e34ee85" [ 691.991218] env[68437]: _type = "Task" [ 691.991218] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.000726] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d121f3-8719-caa4-4458-5e651e34ee85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.016300] env[68437]: DEBUG nova.network.neutron [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.045336] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.045598] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquired lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.045667] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 692.065850] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943676, 'name': CreateVM_Task, 'duration_secs': 0.443861} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.065905] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.066666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.066829] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.067199] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 692.067525] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e2e602-4ff9-4c5e-9082-48294c198f4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.072801] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 692.072801] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52589f08-1825-79b6-38f5-fae3e1d88362" [ 692.072801] env[68437]: _type = "Task" [ 692.072801] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.081983] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52589f08-1825-79b6-38f5-fae3e1d88362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.136408] env[68437]: INFO nova.compute.manager [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Took 18.15 seconds to build instance. 
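The interleaved "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around "[datastore1] devstack-image-cache_base/..." come from oslo.concurrency's lockutils, which the vmwareapi driver uses to serialise work on a shared image-cache path. A rough sketch of that pattern follows, assuming oslo.concurrency is importable in this environment and using only a process-local lock (the external semaphore also taken in the log is not reproduced here); the fetch_image_if_missing body is a hypothetical placeholder, not Nova's code.

from oslo_concurrency import lockutils

IMAGE_CACHE_LOCK = (
    "[datastore1] devstack-image-cache_base/"
    "a272f526-6b8d-4a29-bd06-cd29ab5fabbe"
)

def fetch_image_if_missing(image_id):
    # Placeholder for the datastore search / copy done while holding the lock.
    print(f"processing image {image_id} under lock")

# lockutils.lock() is a context manager; entering and leaving it is what
# produces the "Acquired lock" / "Releasing lock" DEBUG lines in the log.
with lockutils.lock(IMAGE_CACHE_LOCK):
    fetch_image_if_missing("a272f526-6b8d-4a29-bd06-cd29ab5fabbe")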
[ 692.326869] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Releasing lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.326869] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Received event network-vif-deleted-9e8115c0-b1ad-464a-9628-a7845a89de10 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 692.327084] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Received event network-vif-plugged-ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 692.327290] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquiring lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.327499] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.327688] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.327857] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] No waiting events found dispatching network-vif-plugged-ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.328039] env[68437]: WARNING nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Received unexpected event network-vif-plugged-ac2dc22a-b9e3-4855-81b4-4f26c019fc72 for instance with vm_state building and task_state spawning. 
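The "Received event network-vif-plugged-ac2dc22a-..." and "Received unexpected event ..." lines above pack the event type and the Neutron port ID into a single tag. Purely as an illustration of how such a tag decomposes (this is not Nova's parsing code, and KNOWN_EVENTS lists only the prefixes seen in this log):

KNOWN_EVENTS = ("network-vif-plugged", "network-vif-deleted", "network-changed")

def split_event_tag(tag):
    """Split e.g. 'network-vif-plugged-<port-uuid>' into (name, port_id)."""
    for name in KNOWN_EVENTS:
        if tag.startswith(name + "-"):
            return name, tag[len(name) + 1:]
    return tag, None

name, port_id = split_event_tag(
    "network-vif-plugged-ac2dc22a-b9e3-4855-81b4-4f26c019fc72")
print(name)     # network-vif-plugged
print(port_id)  # ac2dc22a-b9e3-4855-81b4-4f26c019fc72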
[ 692.328204] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Received event network-changed-ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 692.328358] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Refreshing instance network info cache due to event network-changed-ac2dc22a-b9e3-4855-81b4-4f26c019fc72. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 692.328540] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquiring lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.328673] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquired lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.328831] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Refreshing network info cache for port ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 692.385254] env[68437]: INFO nova.compute.manager [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Took 19.07 seconds to build instance. [ 692.410417] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 692.435959] env[68437]: DEBUG nova.scheduler.client.report [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 692.462784] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943673, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636548} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.467512] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 5abc2c5a-2177-4d77-97ce-872808bb47ee/5abc2c5a-2177-4d77-97ce-872808bb47ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.467512] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.467512] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5b9502b-b340-479c-aeab-b139f8e71643 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.485275] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 692.485275] env[68437]: value = "task-2943677" [ 692.485275] env[68437]: _type = "Task" [ 692.485275] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.492953] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943677, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.500832] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 692.501219] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.501424] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 692.501680] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.501850] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 692.502066] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 692.502309] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 692.502436] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 692.502646] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 692.502897] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 692.503168] env[68437]: DEBUG nova.virt.hardware [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 692.504802] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3bd664-9328-4916-bf9f-a335faaefa4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.513961] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d121f3-8719-caa4-4458-5e651e34ee85, 'name': SearchDatastore_Task, 'duration_secs': 0.05441} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.514919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.515286] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/19dde8dd-eae6-41a0-b147-c505db1cda15.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 692.515660] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e9a8803-d4f1-4929-a43b-ee4695e065e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.521545] env[68437]: INFO nova.compute.manager [-] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Took 1.95 seconds to deallocate network for instance. 
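The "Inventory has not changed for provider 422e986f-..." entry a few lines above carries the full inventory record for this compute node. As a quick sanity check on those numbers, the sketch below recomputes schedulable capacity with the standard Placement formula, capacity = (total - reserved) * allocation_ratio; the recomputation is an illustration added here, not part of the log.

# Inventory exactly as reported for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for resource, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{resource}: schedulable capacity = {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400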
[ 692.526063] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c462c418-5d88-48e4-b787-81ac12884b86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.535798] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 692.535798] env[68437]: value = "task-2943678" [ 692.535798] env[68437]: _type = "Task" [ 692.535798] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.558438] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943678, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.590830] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52589f08-1825-79b6-38f5-fae3e1d88362, 'name': SearchDatastore_Task, 'duration_secs': 0.013241} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.591199] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.591450] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.591709] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.591884] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.592132] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.594272] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9d3048a-5431-4e82-a750-543d7ec8abae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.602845] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.605388] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.605388] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fc824fa-032a-47f7-a283-151613ed2362 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.611325] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 692.611325] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52cca60e-ec8e-fc53-fd2b-1d3c9ae61a69" [ 692.611325] env[68437]: _type = "Task" [ 692.611325] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.623138] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cca60e-ec8e-fc53-fd2b-1d3c9ae61a69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.629573] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 692.637954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6da98c8b-8e76-4169-88b3-511a086de16a tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.667s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.890973] env[68437]: DEBUG oslo_concurrency.lockutils [None req-350cd5a8-7592-47f7-a656-caf721b14f6f tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.580s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.936803] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.937329] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 692.940496] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.935s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.945712] env[68437]: INFO nova.compute.claims [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.994855] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943677, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066544} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.994855] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.995525] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caf7e85-4bc9-4c77-b417-d8358524ed6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.024708] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 5abc2c5a-2177-4d77-97ce-872808bb47ee/5abc2c5a-2177-4d77-97ce-872808bb47ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.026160] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b188ddd9-698f-49fa-a74d-8e92b9e37245 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.041859] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.054146] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943678, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.055120] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updated VIF entry in instance network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 693.055508] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updating instance_info_cache with network_info: [{"id": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "address": "fa:16:3e:c4:8f:8d", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap095e5fc1-9f", "ovs_interfaceid": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.058142] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 693.058142] env[68437]: value = "task-2943679" [ 693.058142] env[68437]: _type = "Task" [ 693.058142] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.067752] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943679, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.095168] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.095398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.128202] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cca60e-ec8e-fc53-fd2b-1d3c9ae61a69, 'name': SearchDatastore_Task, 'duration_secs': 0.013095} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.129126] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ebaf50b-d10e-42c2-8c0f-4742c9fa4880 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.136288] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 693.136288] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5276b2b0-47d6-9a28-c7d8-c82c77a16610" [ 693.136288] env[68437]: _type = "Task" [ 693.136288] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.140107] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.148899] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276b2b0-47d6-9a28-c7d8-c82c77a16610, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.299519] env[68437]: DEBUG nova.network.neutron [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Updating instance_info_cache with network_info: [{"id": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "address": "fa:16:3e:d3:3f:6b", "network": {"id": "3c1fc0bf-b55a-48d0-a9c8-d98001d2fe5d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1137126005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "025967d11eca4b2fb49264397076e046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d6a8621-90", "ovs_interfaceid": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.393850] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.442918] env[68437]: DEBUG nova.compute.utils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 693.444769] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 693.444957] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 693.554220] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943678, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.762431} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.554220] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/19dde8dd-eae6-41a0-b147-c505db1cda15.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 693.554220] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 693.554460] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3219292a-c599-4932-aa1f-82cf99dd7292 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.563730] env[68437]: DEBUG nova.policy [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8f30aea8b2b4ea1a6eb7d30875a4c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '188b948736f44dfa8dd9aeb258180c58', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 693.567297] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Releasing lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.567297] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 693.567588] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing instance network info cache due to event network-changed-095e5fc1-9fd6-4b04-b1af-3637ee220d7c. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 693.568322] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquiring lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.568545] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquired lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.568797] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Refreshing network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 693.570130] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 693.570130] env[68437]: value = "task-2943680" [ 693.570130] env[68437]: _type = "Task" [ 693.570130] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.578919] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943679, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.584310] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943680, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.650829] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276b2b0-47d6-9a28-c7d8-c82c77a16610, 'name': SearchDatastore_Task, 'duration_secs': 0.033254} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.654889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.654889] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0649ee2f-cd90-4597-b7c4-09f2acaf3f54/0649ee2f-cd90-4597-b7c4-09f2acaf3f54.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.654889] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1cf71832-e24e-4a82-8d71-b1aea7085cfd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.663536] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 693.663536] env[68437]: value = "task-2943681" [ 693.663536] env[68437]: _type = "Task" [ 693.663536] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.675900] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943681, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.676311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.707113] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Updated VIF entry in instance network info cache for port ac2dc22a-b9e3-4855-81b4-4f26c019fc72. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 693.707479] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Updating instance_info_cache with network_info: [{"id": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "address": "fa:16:3e:52:05:c8", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac2dc22a-b9", "ovs_interfaceid": "ac2dc22a-b9e3-4855-81b4-4f26c019fc72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.802569] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Releasing lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.802959] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Instance network_info: |[{"id": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "address": "fa:16:3e:d3:3f:6b", "network": {"id": "3c1fc0bf-b55a-48d0-a9c8-d98001d2fe5d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1137126005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "025967d11eca4b2fb49264397076e046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d6a8621-90", "ovs_interfaceid": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 693.803220] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None 
req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:3f:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d6a8621-9098-4af3-9f1b-1579862d0ca3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.817735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Creating folder: Project (025967d11eca4b2fb49264397076e046). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.817735] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0c39bea-d45e-407f-ad28-795509c7fbe0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.828203] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Created folder: Project (025967d11eca4b2fb49264397076e046) in parent group-v590848. [ 693.828509] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Creating folder: Instances. Parent ref: group-v590885. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.828699] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccbf46fd-213f-4fd9-b38d-1c82b223d48d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.838645] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Created folder: Instances in parent group-v590885. [ 693.838767] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.838973] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.839212] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9b18cff-90f9-44db-9e6c-b58919753af0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.866339] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.866339] env[68437]: value = "task-2943684" [ 693.866339] env[68437]: _type = "Task" [ 693.866339] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.878496] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943684, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.925187] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.949341] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 694.008139] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Successfully updated port: ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 694.081898] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943679, 'name': ReconfigVM_Task, 'duration_secs': 0.822057} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.085637] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 5abc2c5a-2177-4d77-97ce-872808bb47ee/5abc2c5a-2177-4d77-97ce-872808bb47ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.086741] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97f88d66-8e9b-4f3e-ab30-c963e1cb39c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.097468] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943680, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.230404} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.099201] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.099530] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 694.099530] env[68437]: value = "task-2943685" [ 694.099530] env[68437]: _type = "Task" [ 694.099530] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.101163] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af73559-bd20-45a8-bac5-f2138ebf557d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.120710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "ce8fd88b-249b-4fee-80fc-35b795d24658" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.121421] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.121535] env[68437]: INFO nova.compute.manager [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Rebooting instance [ 694.123320] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943685, 'name': Rename_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.145573] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/19dde8dd-eae6-41a0-b147-c505db1cda15.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.150810] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6b1f6e9-d854-43f9-9147-e8b6dcee9618 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.171029] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Successfully created port: a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.185388] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943681, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.190671] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 694.190671] env[68437]: value = "task-2943686" [ 694.190671] env[68437]: _type = "Task" [ 694.190671] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.205250] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943686, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.210996] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Releasing lock "refresh_cache-5abc2c5a-2177-4d77-97ce-872808bb47ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.211372] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Received event network-vif-plugged-9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 694.211732] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquiring lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.212462] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.212462] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.212462] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] No waiting events found dispatching network-vif-plugged-9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 694.212462] env[68437]: WARNING nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Received unexpected event network-vif-plugged-9fbfd56e-861b-488a-afc9-9efe25097c73 for instance with vm_state building and task_state spawning. [ 694.212847] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Received event network-changed-9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 694.212847] env[68437]: DEBUG nova.compute.manager [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Refreshing instance network info cache due to event network-changed-9fbfd56e-861b-488a-afc9-9efe25097c73. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 694.213288] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquiring lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.213288] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Acquired lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.213607] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Refreshing network info cache for port 9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 694.380368] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943684, 'name': CreateVM_Task, 'duration_secs': 0.438856} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.385566] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.387628] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.387731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.388100] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.391101] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Received event network-vif-plugged-bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 694.391101] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquiring lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
694.391101] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.391101] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.391101] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] No waiting events found dispatching network-vif-plugged-bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 694.391798] env[68437]: WARNING nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Received unexpected event network-vif-plugged-bd37d751-cb4d-4517-b9be-bfa192d0fff9 for instance with vm_state building and task_state spawning. [ 694.391798] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Received event network-changed-bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 694.391798] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Refreshing instance network info cache due to event network-changed-bd37d751-cb4d-4517-b9be-bfa192d0fff9. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 694.391798] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquiring lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.391798] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquired lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.392030] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Refreshing network info cache for port bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 694.393040] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb7f49c9-7701-4636-a9eb-cddbfced824a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.399063] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 694.399063] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5296496c-db9b-c816-1e7c-6d912a7ca839" [ 694.399063] env[68437]: _type = "Task" [ 694.399063] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.412207] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5296496c-db9b-c816-1e7c-6d912a7ca839, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.451080] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df38473e-9361-446a-8f98-a93383169c6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.466419] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2081bc-e308-43a4-9a18-55b6d8a4dd81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.504617] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832db3d5-0fbe-4ff6-b11b-6d0a7231063a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.507545] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.507699] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.507859] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 694.512621] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163a026e-e03c-4f03-b334-5dcbd58196eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.517716] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updated VIF entry in instance network info cache for port 095e5fc1-9fd6-4b04-b1af-3637ee220d7c. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 694.518116] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updating instance_info_cache with network_info: [{"id": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "address": "fa:16:3e:c4:8f:8d", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap095e5fc1-9f", "ovs_interfaceid": "095e5fc1-9fd6-4b04-b1af-3637ee220d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.530719] env[68437]: DEBUG nova.compute.provider_tree [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.617061] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943685, 'name': Rename_Task, 'duration_secs': 0.198724} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.617688] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 694.618045] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c79a94a0-b50d-4d4e-adf8-8e370f264613 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.627219] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 694.627219] env[68437]: value = "task-2943687" [ 694.627219] env[68437]: _type = "Task" [ 694.627219] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.634597] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.678568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.678568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquired lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.678568] env[68437]: DEBUG nova.network.neutron [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 694.684221] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671847} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.684507] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0649ee2f-cd90-4597-b7c4-09f2acaf3f54/0649ee2f-cd90-4597-b7c4-09f2acaf3f54.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.684742] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.685189] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0ba8554-8268-4768-b458-3eb7a9785138 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.692116] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 694.692116] env[68437]: value = "task-2943688" [ 694.692116] env[68437]: _type = "Task" [ 694.692116] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.706285] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943686, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.709145] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943688, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.910713] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5296496c-db9b-c816-1e7c-6d912a7ca839, 'name': SearchDatastore_Task, 'duration_secs': 0.013206} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.910713] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.911299] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.911299] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.911455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.911658] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.912025] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0927b131-caf1-4778-b59d-29b7894fe4ee {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.921798] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.921798] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.922558] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0b88110-62e2-46bc-8064-25586cf65d55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.928475] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 694.928475] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52774268-2f88-b93d-19ed-53cd4c100b79" [ 694.928475] env[68437]: _type = "Task" [ 694.928475] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.938724] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52774268-2f88-b93d-19ed-53cd4c100b79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.971031] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 694.992323] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 694.992483] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.992583] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 694.992852] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.993016] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 694.997114] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 694.997114] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 694.997114] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 694.997114] env[68437]: DEBUG nova.virt.hardware [None 
req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 694.997114] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 694.997316] env[68437]: DEBUG nova.virt.hardware [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 694.997316] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd189de-47cb-4bda-b507-01a7f41acbc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.008850] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316c4854-df35-41ea-b283-e4efaa575289 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.029576] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Releasing lock "refresh_cache-45595615-59c0-4c59-b18c-b49a3126dbb7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.029701] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-changed-755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 695.029935] env[68437]: DEBUG nova.compute.manager [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing instance network info cache due to event network-changed-755ab792-6755-4f3f-8d83-38106672f90b. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 695.030198] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.030505] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.030718] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing network info cache for port 755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 695.034035] env[68437]: DEBUG nova.scheduler.client.report [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 695.082934] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updated VIF entry in instance network info cache for port 9fbfd56e-861b-488a-afc9-9efe25097c73. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 695.082934] env[68437]: DEBUG nova.network.neutron [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [{"id": "9fbfd56e-861b-488a-afc9-9efe25097c73", "address": "fa:16:3e:be:cf:7a", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbfd56e-86", "ovs_interfaceid": "9fbfd56e-861b-488a-afc9-9efe25097c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.110254] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 695.136973] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943687, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.168212] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Updated VIF entry in instance network info cache for port bd37d751-cb4d-4517-b9be-bfa192d0fff9. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 695.168576] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Updating instance_info_cache with network_info: [{"id": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "address": "fa:16:3e:3a:c4:09", "network": {"id": "0e753c56-c852-47cd-beff-ae9a78e80fae", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1790064924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "df4cce8aeb924a148b0dfcc56745f8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd37d751-cb", "ovs_interfaceid": "bd37d751-cb4d-4517-b9be-bfa192d0fff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.208894] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943686, 'name': ReconfigVM_Task, 'duration_secs': 0.745807} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.215601] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/19dde8dd-eae6-41a0-b147-c505db1cda15.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.216640] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943688, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.208261} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.218368] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1965b3b-5305-4aa1-a590-7e45b7ad1f5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.219070] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.220603] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5180f03-7b07-445a-9def-0467611e640e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.246571] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 0649ee2f-cd90-4597-b7c4-09f2acaf3f54/0649ee2f-cd90-4597-b7c4-09f2acaf3f54.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.250653] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b4eb146-a8ef-4f92-b8b7-94084ec10096 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.266342] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 695.266342] env[68437]: value = "task-2943689" [ 695.266342] env[68437]: _type = "Task" [ 695.266342] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.274776] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 695.274776] env[68437]: value = "task-2943690" [ 695.274776] env[68437]: _type = "Task" [ 695.274776] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.287227] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943689, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.289691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "ad773afa-fc0a-4380-901d-af013ce55f2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.291041] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.294914] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943690, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.442369] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52774268-2f88-b93d-19ed-53cd4c100b79, 'name': SearchDatastore_Task, 'duration_secs': 0.016282} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.443231] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2cd9450-0fed-4853-8fff-1178a00c835d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.449437] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 695.449437] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5219af14-4313-d3d1-f1ea-89c7b5639d31" [ 695.449437] env[68437]: _type = "Task" [ 695.449437] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.459415] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5219af14-4313-d3d1-f1ea-89c7b5639d31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.510418] env[68437]: DEBUG nova.network.neutron [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Updating instance_info_cache with network_info: [{"id": "9f47f745-6f86-4d20-929b-376aeda67a6e", "address": "fa:16:3e:80:61:4f", "network": {"id": "0259266c-be3d-4f63-b92f-a450f922ff76", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1548136929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73180707777547908c86bf1771a04d05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a407774d-9c2a-411d-9d6f-9ca733b97f3f", "external-id": "nsx-vlan-transportzone-710", "segmentation_id": 710, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f47f745-6f", "ovs_interfaceid": "9f47f745-6f86-4d20-929b-376aeda67a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.538320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.538712] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 695.541219] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.178s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.541463] env[68437]: DEBUG nova.objects.instance [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lazy-loading 'resources' on Instance uuid df3fbf16-d3d9-4138-b563-6ea09dd233b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 695.555219] env[68437]: DEBUG nova.network.neutron [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Updating instance_info_cache with network_info: [{"id": "ad41ce28-cc67-4b33-b2d2-257330d41543", "address": "fa:16:3e:06:f4:fa", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41ce28-cc", "ovs_interfaceid": "ad41ce28-cc67-4b33-b2d2-257330d41543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.586089] env[68437]: DEBUG oslo_concurrency.lockutils [req-aedeba22-8349-4625-b7c8-b6f01d39b0a6 req-afb8a024-82ec-4c30-8751-d987efeff89c service nova] Releasing lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.639341] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943687, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.671735] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Releasing lock "refresh_cache-0649ee2f-cd90-4597-b7c4-09f2acaf3f54" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.671929] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Received event network-vif-plugged-9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 695.672160] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquiring lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.672367] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.672529] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.672990] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] No waiting events found dispatching network-vif-plugged-9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.673221] env[68437]: WARNING nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Received unexpected event network-vif-plugged-9d6a8621-9098-4af3-9f1b-1579862d0ca3 for instance with vm_state building and task_state spawning. 
[ 695.673959] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Received event network-vif-deleted-1856e5ed-7e1c-4837-a8bc-75086deed489 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 695.673959] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Received event network-changed-9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 695.674087] env[68437]: DEBUG nova.compute.manager [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Refreshing instance network info cache due to event network-changed-9d6a8621-9098-4af3-9f1b-1579862d0ca3. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 695.674236] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquiring lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.674383] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Acquired lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.674566] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Refreshing network info cache for port 9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 695.789615] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943689, 'name': Rename_Task, 'duration_secs': 0.286132} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.798099] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.798099] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943690, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.798099] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67224e4e-9aa5-4ddd-9f61-d0e6cb8ef526 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.807014] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 695.807014] env[68437]: value = "task-2943691" [ 695.807014] env[68437]: _type = "Task" [ 695.807014] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.824165] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.963399] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5219af14-4313-d3d1-f1ea-89c7b5639d31, 'name': SearchDatastore_Task, 'duration_secs': 0.017478} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.963399] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.963399] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 1537e626-f2ec-4b5d-bcba-50cd583dff31/1537e626-f2ec-4b5d-bcba-50cd583dff31.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.963399] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc9ff8a2-0246-452a-b50c-199c3632da47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.976284] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 695.976284] env[68437]: value = "task-2943692" [ 695.976284] env[68437]: _type = "Task" [ 695.976284] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.991564] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.013504] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Releasing lock "refresh_cache-ce8fd88b-249b-4fee-80fc-35b795d24658" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.048369] env[68437]: DEBUG nova.compute.utils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 696.053020] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 696.053737] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 696.056716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.058583] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Instance network_info: |[{"id": "ad41ce28-cc67-4b33-b2d2-257330d41543", "address": "fa:16:3e:06:f4:fa", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41ce28-cc", "ovs_interfaceid": 
"ad41ce28-cc67-4b33-b2d2-257330d41543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 696.058689] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:f4:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad41ce28-cc67-4b33-b2d2-257330d41543', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 696.066368] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.070028] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 696.070151] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-304dcee2-e0e5-4575-893d-44ab0b837f4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.097405] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updated VIF entry in instance network info cache for port 755ab792-6755-4f3f-8d83-38106672f90b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 696.097669] env[68437]: DEBUG nova.network.neutron [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.111694] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 696.111694] env[68437]: value = "task-2943693" [ 696.111694] env[68437]: _type = "Task" [ 696.111694] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.117656] env[68437]: DEBUG nova.policy [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0e66fd345044e92857d742c65f537ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36ec823128647758ca8047a5ebf1ae1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 696.125024] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943693, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.144224] env[68437]: DEBUG oslo_vmware.api [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943687, 'name': PowerOnVM_Task, 'duration_secs': 1.306069} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.144224] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.144224] env[68437]: INFO nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Took 13.67 seconds to spawn the instance on the hypervisor. [ 696.144224] env[68437]: DEBUG nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.144224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0274c1a-ff14-446c-8192-3780221d89f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.294918] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943690, 'name': ReconfigVM_Task, 'duration_secs': 0.601819} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.295341] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 0649ee2f-cd90-4597-b7c4-09f2acaf3f54/0649ee2f-cd90-4597-b7c4-09f2acaf3f54.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.296029] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cbba94f1-df8e-4b84-b78e-ef0de9ac978c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.307201] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 696.307201] env[68437]: value = "task-2943694" [ 696.307201] env[68437]: _type = "Task" [ 696.307201] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.323516] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943694, 'name': Rename_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.329873] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943691, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.469050] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.469618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.491147] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943692, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.522391] env[68437]: DEBUG nova.compute.manager [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.523724] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a504404-76fb-47ff-9182-73853842917f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.557177] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 696.601258] env[68437]: DEBUG oslo_concurrency.lockutils [req-473575a2-edb0-40ef-991e-1cd4dc649933 req-52bb8ac1-409c-4870-a86d-4208c833c4a5 service nova] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.629907] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943693, 'name': CreateVM_Task, 'duration_secs': 0.496019} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.636500] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 696.638820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.639049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.642727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 696.642727] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8854425f-5727-442b-ae62-7ec727098e00 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.647819] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 696.647819] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c3db4e-a90c-4e36-1976-23d33bd611cc" [ 696.647819] env[68437]: _type = "Task" [ 696.647819] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.667655] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c3db4e-a90c-4e36-1976-23d33bd611cc, 'name': SearchDatastore_Task, 'duration_secs': 0.013631} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.668258] env[68437]: INFO nova.compute.manager [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Took 21.29 seconds to build instance. 
[ 696.669301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.671470] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 696.671470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.671470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.671470] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.671470] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ac46598-ac33-4635-890e-31667d4fe21e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.691159] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.691421] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 696.693328] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e7f2cb5-d4c1-4590-a283-bd29cf72ae9b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.699393] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Successfully updated port: a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 696.706792] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 696.706792] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b64c6c-aa95-7e8e-4dd0-3dc3a5c9075f" [ 696.706792] env[68437]: _type = "Task" [ 696.706792] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.709259] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-508a5de1-58a2-4bc5-97f2-2f11a87b6a45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.727155] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8241739c-179a-4d6d-8c53-f1106d407c11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.730394] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b64c6c-aa95-7e8e-4dd0-3dc3a5c9075f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.761059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63472141-28cc-4720-84d8-8742a34dad35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.775161] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295d70cd-2640-439d-a409-3b6d0f3ae9dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.794796] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.827355] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943694, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.830581] env[68437]: DEBUG oslo_vmware.api [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943691, 'name': PowerOnVM_Task, 'duration_secs': 0.744883} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.830962] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.831292] env[68437]: INFO nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Took 12.02 seconds to spawn the instance on the hypervisor. 
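The record above shows the report client pushing an inventory dict (VCPU, MEMORY_MB, DISK_GB) into its ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, and the ERROR entries a little further down show the matching update to the Placement API failing with 409 "placement.concurrent_update" because the cached resource provider generation was stale, after which the client refreshes inventories, aggregates and traits and retries. The sketch below approximates that generation-guarded update; PLACEMENT_URL, the token, the microversion header and the retry count are placeholder assumptions, not Nova's actual report-client code.

# Illustrative sketch: a generation-guarded inventory PUT to Placement with
# a retry on the 409 placement.concurrent_update seen in this log.
# PLACEMENT_URL, the token and the retry count are placeholders.
import requests

PLACEMENT_URL = 'http://placement.example:8778'    # placeholder endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}

def put_inventory(provider_uuid, inventories, retries=3):
    url = f'{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories'
    for _ in range(retries):
        # Fetch the current generation so the PUT is conditional on it.
        current = requests.get(url, headers=HEADERS).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop, re-read the generation and try again,
        # which is the "Refreshing inventories ..." behaviour logged below.
    raise RuntimeError('inventory update kept conflicting; giving up')

The inventories argument would carry exactly the kind of dict logged above, e.g. {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, ...}.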
[ 696.831569] env[68437]: DEBUG nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.832421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb88547-ad76-4b29-9a11-cec1b5c14ba5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.913214] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Successfully created port: 5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.989068] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65907} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.989315] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 1537e626-f2ec-4b5d-bcba-50cd583dff31/1537e626-f2ec-4b5d-bcba-50cd583dff31.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.989472] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.992344] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-122c83b8-c0bf-46e3-840f-2995a7a23ad6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.997708] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 696.997708] env[68437]: value = "task-2943695" [ 696.997708] env[68437]: _type = "Task" [ 696.997708] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.006802] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943695, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.172045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d68c532-0843-4b6e-bdd1-1f49943da025 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.813s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.207294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.207487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.207615] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 697.223223] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b64c6c-aa95-7e8e-4dd0-3dc3a5c9075f, 'name': SearchDatastore_Task, 'duration_secs': 0.041783} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.224146] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d68c2657-89fd-4097-9291-2f5ad58b3b41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.231159] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 697.231159] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52357e25-25e0-2b99-50ed-285e1347b0b2" [ 697.231159] env[68437]: _type = "Task" [ 697.231159] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.247631] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52357e25-25e0-2b99-50ed-285e1347b0b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.248548] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Updated VIF entry in instance network info cache for port 9d6a8621-9098-4af3-9f1b-1579862d0ca3. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 697.248886] env[68437]: DEBUG nova.network.neutron [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Updating instance_info_cache with network_info: [{"id": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "address": "fa:16:3e:d3:3f:6b", "network": {"id": "3c1fc0bf-b55a-48d0-a9c8-d98001d2fe5d", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1137126005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "025967d11eca4b2fb49264397076e046", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d6a8621-90", "ovs_interfaceid": "9d6a8621-9098-4af3-9f1b-1579862d0ca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.319241] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943694, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.333343] env[68437]: ERROR nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [req-684b9c83-9964-43fc-9f71-1928a0cd5ec0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-684b9c83-9964-43fc-9f71-1928a0cd5ec0"}]} [ 697.353673] env[68437]: INFO nova.compute.manager [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Took 17.29 seconds to build instance. [ 697.359647] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 697.383835] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 697.384011] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.397040] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 697.417416] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 697.509087] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 
tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943695, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080238} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.509369] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.510326] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c4b416-ea39-47af-b721-c4ff19814b31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.538208] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 1537e626-f2ec-4b5d-bcba-50cd583dff31/1537e626-f2ec-4b5d-bcba-50cd583dff31.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.541722] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c026d9c9-6ca4-4bb8-8e19-8cdaad39ac1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.564986] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 697.564986] env[68437]: value = "task-2943696" [ 697.564986] env[68437]: _type = "Task" [ 697.564986] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.572553] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.576425] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 697.579530] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21dd2b8-f553-4c8d-b4cd-f261c16e43ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.591935] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Doing hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 697.592212] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-07578906-51a5-4c34-a8fc-6d6820331f06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.598011] env[68437]: DEBUG oslo_vmware.api [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 697.598011] env[68437]: value = "task-2943697" [ 697.598011] env[68437]: _type = "Task" [ 697.598011] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.613544] env[68437]: DEBUG oslo_vmware.api [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943697, 'name': ResetVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.622415] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 697.622783] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.623009] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 697.623262] env[68437]: DEBUG 
nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.623455] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 697.623656] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 697.623968] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 697.624178] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 697.624370] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 697.625024] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 697.625024] env[68437]: DEBUG nova.virt.hardware [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 697.625763] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42aa5f34-b2eb-47ac-bb94-a872b40408cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.637111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d61b2e-a486-4e17-abd8-92eec96c3a9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.679459] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 697.747743] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52357e25-25e0-2b99-50ed-285e1347b0b2, 'name': SearchDatastore_Task, 'duration_secs': 0.018553} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.747743] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.747743] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f/a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 697.747743] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b165666-b63b-4dd8-9fd9-d39ce3f79642 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.753901] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbaab9d8-4269-44ad-b90f-e952536d8337 req-65be3ca3-40f1-4574-9c21-63e8335c9ef9 service nova] Releasing lock "refresh_cache-1537e626-f2ec-4b5d-bcba-50cd583dff31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.754940] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 697.754940] env[68437]: value = "task-2943698" [ 697.754940] env[68437]: _type = "Task" [ 697.754940] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.764147] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.823929] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943694, 'name': Rename_Task, 'duration_secs': 1.397411} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.823929] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 697.823929] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a700a9db-9c28-46d3-a59e-a01b3e7e6001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.829541] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 697.829541] env[68437]: value = "task-2943699" [ 697.829541] env[68437]: _type = "Task" [ 697.829541] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.837791] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.844211] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 697.857075] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eded4a88-b743-4fef-816b-6bb11c3fcb09 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.805s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.899514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9d912e-2295-4445-b1e1-cbc83b4b2d91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.909773] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eaa893-eaaf-44b9-a6de-4ff8ddabe3fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.944577] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b485e-792f-4f5c-9caf-732eff5e78b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.957536] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3001dc0-58e0-4344-a968-d857e985287f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.975411] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.075976] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.113266] env[68437]: DEBUG oslo_vmware.api [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943697, 'name': ResetVM_Task, 'duration_secs': 0.101199} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.113610] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Did hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 698.113827] env[68437]: DEBUG nova.compute.manager [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 698.114700] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f96761-8150-490c-90f1-1900f780387f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.169952] env[68437]: DEBUG nova.network.neutron [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Updating instance_info_cache with network_info: [{"id": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "address": "fa:16:3e:04:4e:a2", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa915dbf0-9e", "ovs_interfaceid": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.213755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.270488] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943698, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.284499] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Received event network-vif-plugged-ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.284805] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Acquiring lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.285120] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.286020] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.286020] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] No waiting events found dispatching network-vif-plugged-ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 698.286020] env[68437]: WARNING nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Received unexpected event network-vif-plugged-ad41ce28-cc67-4b33-b2d2-257330d41543 for instance with vm_state building and task_state spawning. [ 698.286229] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Received event network-changed-ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.286375] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Refreshing instance network info cache due to event network-changed-ad41ce28-cc67-4b33-b2d2-257330d41543. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 698.286586] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Acquiring lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.286803] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Acquired lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.287522] env[68437]: DEBUG nova.network.neutron [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Refreshing network info cache for port ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 698.341264] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943699, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.364157] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.461336] env[68437]: DEBUG nova.compute.manager [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 698.509774] env[68437]: ERROR nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] [req-602496ca-e2cd-4bf4-8424-0e0fabeed3e4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-602496ca-e2cd-4bf4-8424-0e0fabeed3e4"}]} [ 698.543336] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 698.575080] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943696, 'name': ReconfigVM_Task, 'duration_secs': 0.569529} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.575378] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 1537e626-f2ec-4b5d-bcba-50cd583dff31/1537e626-f2ec-4b5d-bcba-50cd583dff31.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 698.576034] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-965d5256-b091-43ac-a947-831bd4ba58bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.582370] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 698.582370] env[68437]: value = "task-2943700" [ 698.582370] env[68437]: _type = "Task" [ 698.582370] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.586989] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 698.587964] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 698.594539] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943700, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.608071] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 698.628594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e75cf2bb-e8e1-4b0a-85a2-7a0351dd0cec tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.507s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.632166] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 698.673721] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.673721] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance network_info: |[{"id": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "address": "fa:16:3e:04:4e:a2", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa915dbf0-9e", "ovs_interfaceid": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 698.675203] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 
tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:4e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a915dbf0-9e3f-41da-b43b-dd0a4225b839', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.683175] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating folder: Project (188b948736f44dfa8dd9aeb258180c58). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.686952] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71992927-9e82-4d6d-aa14-dd61366203b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.698426] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created folder: Project (188b948736f44dfa8dd9aeb258180c58) in parent group-v590848. [ 698.698426] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating folder: Instances. Parent ref: group-v590889. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.698426] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa18b641-63c7-4e08-9d86-2cdfc43a5a02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.711126] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created folder: Instances in parent group-v590889. [ 698.711430] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.711715] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 698.711963] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb764a63-be53-43e2-a870-d6be08bb4e77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.739855] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.739855] env[68437]: value = "task-2943703" [ 698.739855] env[68437]: _type = "Task" [ 698.739855] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.750291] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943703, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.767768] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546957} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.768069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f/a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 698.768303] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 698.768578] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f9f41a4-b4fe-4856-937f-adaf13fcbedb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.775350] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 698.775350] env[68437]: value = "task-2943704" [ 698.775350] env[68437]: _type = "Task" [ 698.775350] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.787333] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943704, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.843130] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943699, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.890999] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.986720] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.096775] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943700, 'name': Rename_Task, 'duration_secs': 0.33848} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.096775] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 699.096775] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c93da54a-35fd-4bb2-8e60-3c1c871c9e91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.102769] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 699.102769] env[68437]: value = "task-2943705" [ 699.102769] env[68437]: _type = "Task" [ 699.102769] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.110847] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc1d762-640b-44e6-8676-2b047db083f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.120040] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943705, 'name': PowerOnVM_Task} progress is 0%. 
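The "Acquiring lock 'compute_resources'" lines above come from oslo.concurrency's lock decorator wrapping the resource tracker's instance_claim and resize_claim; every claim on this compute node is serialized on one named semaphore, which is why the log later reports how long each caller waited for and held the lock. A hedged sketch of the decorator pattern (the claim body here is invented, not Nova's):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(tracker_state, flavor):
        # Runs with the node-wide lock held, so concurrent claims from different
        # API requests cannot race on the usage counters.
        tracker_state['memory_mb_used'] += flavor['memory_mb']
        tracker_state['vcpus_used'] += flavor['vcpus']
        return tracker_state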
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.127247] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea86688-d3b6-4c9f-90ab-304109894e0d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.170303] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fceb9a-42cd-4bd7-85ba-fc5e39893380 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.180415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fab9275-441f-4da2-8a6b-3ac444a99a26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.197368] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.202677] env[68437]: DEBUG nova.network.neutron [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Updated VIF entry in instance network info cache for port ad41ce28-cc67-4b33-b2d2-257330d41543. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 699.202677] env[68437]: DEBUG nova.network.neutron [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Updating instance_info_cache with network_info: [{"id": "ad41ce28-cc67-4b33-b2d2-257330d41543", "address": "fa:16:3e:06:f4:fa", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41ce28-cc", "ovs_interfaceid": "ad41ce28-cc67-4b33-b2d2-257330d41543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.252066] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943703, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.296836] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943704, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075799} completed successfully. 
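The instance_info_cache entry above is plain JSON, so it can be summarized directly; a small sketch (function name invented) pulling out the fields the rest of this log keeps referring to, i.e. the port ID, MAC, fixed IPs, NSX segmentation ID, and tap device name:

    def summarize_vif(vif):
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        return {
            'port_id': vif['id'],                                       # ad41ce28-cc67-...
            'mac': vif['address'],                                      # fa:16:3e:06:f4:fa
            'fixed_ips': fixed_ips,                                     # ['192.168.233.5']
            'segmentation_id': vif['details'].get('segmentation_id'),   # 290
            'devname': vif.get('devname'),                              # tapad41ce28-cc
        }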
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.297136] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 699.298791] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8e6d3e-53f8-41a3-ac3a-d83d71fd2920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.326936] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f/a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 699.327794] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02597c29-1ffb-4dc5-b1c8-dbcc9e326e2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.354476] env[68437]: DEBUG oslo_vmware.api [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943699, 'name': PowerOnVM_Task, 'duration_secs': 1.08264} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.356093] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 699.357153] env[68437]: INFO nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Took 12.06 seconds to spawn the instance on the hypervisor. [ 699.357153] env[68437]: DEBUG nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 699.357153] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 699.357153] env[68437]: value = "task-2943706" [ 699.357153] env[68437]: _type = "Task" [ 699.357153] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.358227] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa56b5d-5cb0-4258-a46d-2dea077a677c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.376782] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.610375] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Successfully updated port: 5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.620245] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943705, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.706405] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Releasing lock "refresh_cache-a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.706405] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Received event network-vif-plugged-a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 699.706405] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Acquiring lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.706405] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.706591] env[68437]: DEBUG oslo_concurrency.lockutils [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.706779] env[68437]: DEBUG nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] 
[instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] No waiting events found dispatching network-vif-plugged-a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 699.706867] env[68437]: WARNING nova.compute.manager [req-3b30aa19-7acb-468c-9135-48c46338ab68 req-08b7897d-c1fd-4bbc-9009-0a7755815e7c service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Received unexpected event network-vif-plugged-a915dbf0-9e3f-41da-b43b-dd0a4225b839 for instance with vm_state building and task_state spawning. [ 699.743021] env[68437]: DEBUG nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 30 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 699.743021] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 30 to 31 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 699.743021] env[68437]: DEBUG nova.compute.provider_tree [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 699.760717] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943703, 'name': CreateVM_Task, 'duration_secs': 0.549872} completed successfully. 
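The inventory payload sent to Placement above determines schedulable capacity as (total - reserved) * allocation_ratio per resource class, with max_unit capping what any single allocation may request. Worked against the numbers reported for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity, 'max per allocation:', inv['max_unit'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0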
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.761394] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.762366] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.763052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.763138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 699.763931] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24bde0c2-ddca-4a82-9284-cab0b3e92c5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.769860] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 699.769860] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520e7213-fc82-a229-1040-51f1d95bc398" [ 699.769860] env[68437]: _type = "Task" [ 699.769860] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.779845] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e7213-fc82-a229-1040-51f1d95bc398, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.874260] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943706, 'name': ReconfigVM_Task, 'duration_secs': 0.360917} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.881511] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Reconfigured VM instance instance-0000000e to attach disk [datastore1] a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f/a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.881511] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09a78e70-9689-4146-a60b-4b369833c290 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.892401] env[68437]: INFO nova.compute.manager [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Took 19.02 seconds to build instance. [ 699.894602] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 699.894602] env[68437]: value = "task-2943707" [ 699.894602] env[68437]: _type = "Task" [ 699.894602] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.904292] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943707, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.113167] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.113428] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.113477] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 700.119038] env[68437]: DEBUG oslo_vmware.api [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943705, 'name': PowerOnVM_Task, 'duration_secs': 0.682287} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.119038] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 700.119038] env[68437]: INFO nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Took 10.31 seconds to spawn the instance on the hypervisor. [ 700.119038] env[68437]: DEBUG nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.119580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a6e557-d309-470f-ac48-18c1b8cd649d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.255203] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.714s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.257887] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.882s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.258194] env[68437]: DEBUG nova.objects.instance [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lazy-loading 'resources' on Instance uuid 180f77ab-e468-410d-8e41-20291487ef5d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.283362] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e7213-fc82-a229-1040-51f1d95bc398, 'name': SearchDatastore_Task, 'duration_secs': 0.047637} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.283615] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.283899] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.284184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.284412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 700.284605] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.284867] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd4d7641-f6fe-4be0-89b6-8a1d20df6040 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.298247] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.298247] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore1] devstack-image-cache_base created. 
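The image-cache handling above follows a lock-then-check shape: take a lock on the cached image path, create devstack-image-cache_base if it does not exist, and search the datastore for the cached VMDK before deciding whether it still needs to be fetched. A rough sketch of that shape, with the datastore helpers passed in rather than guessed at:

    from oslo_concurrency import lockutils

    def ensure_cached_image(session, cache_path, make_directory, search_datastore):
        # cache_path e.g. "[datastore1] devstack-image-cache_base/<image-id>"
        with lockutils.lock(cache_path):
            make_directory(session, cache_path)           # idempotent MakeDirectory
            return search_datastore(session, cache_path)  # SearchDatastore_Task result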
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 700.298756] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-891d3bc9-e197-4020-be2d-2eaa13ee1e5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.306430] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 700.306430] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524112b5-dd8e-00e3-722f-8a1c160f68fc" [ 700.306430] env[68437]: _type = "Task" [ 700.306430] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.312030] env[68437]: INFO nova.scheduler.client.report [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Deleted allocations for instance df3fbf16-d3d9-4138-b563-6ea09dd233b8 [ 700.321842] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524112b5-dd8e-00e3-722f-8a1c160f68fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.391693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38058333-6241-4475-99c3-1cc85d72d317 tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.548s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.406410] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943707, 'name': Rename_Task, 'duration_secs': 0.398212} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.406894] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.407353] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40ae4c53-bf41-4ccb-a211-f90edf670ef6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.417380] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 700.417380] env[68437]: value = "task-2943708" [ 700.417380] env[68437]: _type = "Task" [ 700.417380] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.427950] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.522605] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.522898] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.523112] env[68437]: DEBUG nova.compute.manager [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.524173] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6fa297-3338-4f18-982f-6e99a4f67572 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.530175] env[68437]: DEBUG nova.compute.manager [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Received event network-vif-plugged-5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 700.530460] env[68437]: DEBUG oslo_concurrency.lockutils [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] Acquiring lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.530716] env[68437]: DEBUG oslo_concurrency.lockutils [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.531048] env[68437]: DEBUG oslo_concurrency.lockutils [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.531269] env[68437]: DEBUG 
nova.compute.manager [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] No waiting events found dispatching network-vif-plugged-5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 700.531475] env[68437]: WARNING nova.compute.manager [req-b160eaba-65d6-4a85-a2b2-bcdd08522d67 req-e830f2df-1074-4745-8aee-0c95b59378e9 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Received unexpected event network-vif-plugged-5b158664-3a56-450a-8a96-2e42835511e3 for instance with vm_state building and task_state spawning. [ 700.541342] env[68437]: DEBUG nova.compute.manager [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 700.542074] env[68437]: DEBUG nova.objects.instance [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'flavor' on Instance uuid 5abc2c5a-2177-4d77-97ce-872808bb47ee {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 700.644362] env[68437]: INFO nova.compute.manager [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Took 18.53 seconds to build instance. [ 700.690274] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 700.826080] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524112b5-dd8e-00e3-722f-8a1c160f68fc, 'name': SearchDatastore_Task, 'duration_secs': 0.017711} completed successfully. 
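The "Stopping instance; current vm_state: active ... current VM power_state: 1" line above compares the power state stored in the database with what the hypervisor just reported (1 is RUNNING in nova.compute.power_state). A loose sketch of that kind of check, not Nova's exact branch logic:

    RUNNING = 1   # nova.compute.power_state.RUNNING
    SHUTDOWN = 4  # nova.compute.power_state.SHUTDOWN

    def needs_power_off(vm_power_state):
        # Only send PowerOffVM_Task when the hypervisor still reports the VM
        # running; if it is already shut down, just resync the DB record.
        return vm_power_state == RUNNING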
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.828821] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8002b4f-85d9-433a-a996-6a43b188bbaa tempest-DeleteServersAdminTestJSON-7562219 tempest-DeleteServersAdminTestJSON-7562219-project-admin] Lock "df3fbf16-d3d9-4138-b563-6ea09dd233b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.780s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.833289] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32a6525b-2b90-45c5-9420-a8f251babc78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.842827] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 700.842827] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c5b1ec-f7eb-3939-ccd9-4657bebed7e3" [ 700.842827] env[68437]: _type = "Task" [ 700.842827] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.853799] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c5b1ec-f7eb-3939-ccd9-4657bebed7e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.896756] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 700.929816] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943708, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.089067] env[68437]: DEBUG nova.network.neutron [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Updating instance_info_cache with network_info: [{"id": "5b158664-3a56-450a-8a96-2e42835511e3", "address": "fa:16:3e:08:d1:d9", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b158664-3a", "ovs_interfaceid": "5b158664-3a56-450a-8a96-2e42835511e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.150092] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1eb9ad96-a0d9-47e0-9d50-eb08274d414d tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.045s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.206917] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81421ad6-f6ab-47fe-b919-f60238546d6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.216058] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b844e1-02f1-4fa2-a936-9d0d2de974ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.247368] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d90536-c975-4c2e-97fb-3eefe4a3351f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.255436] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf526b6-c7d5-454a-a835-11b5e9284347 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.269607] env[68437]: DEBUG nova.compute.provider_tree [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.358030] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c5b1ec-f7eb-3939-ccd9-4657bebed7e3, 'name': SearchDatastore_Task, 'duration_secs': 0.012356} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.358030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.358442] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.358543] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf38fccd-4913-4c5e-a02e-36f4cbf15c45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.368059] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 701.368059] env[68437]: value = "task-2943709" [ 701.368059] env[68437]: _type = "Task" [ 701.368059] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.383039] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.428518] env[68437]: DEBUG oslo_vmware.api [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943708, 'name': PowerOnVM_Task, 'duration_secs': 0.954489} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.428845] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 701.429302] env[68437]: INFO nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Took 9.02 seconds to spawn the instance on the hypervisor. [ 701.429531] env[68437]: DEBUG nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.431097] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f661d96f-2f0c-4222-b774-43310dfaae07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.433996] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.549751] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 701.552015] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27744fe3-9c73-432d-abae-cfd1dbbddd1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.558735] env[68437]: DEBUG oslo_vmware.api [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 701.558735] env[68437]: value = "task-2943710" [ 701.558735] env[68437]: _type = "Task" [ 701.558735] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.570509] env[68437]: DEBUG oslo_vmware.api [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943710, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.592756] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.594100] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance network_info: |[{"id": "5b158664-3a56-450a-8a96-2e42835511e3", "address": "fa:16:3e:08:d1:d9", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b158664-3a", "ovs_interfaceid": "5b158664-3a56-450a-8a96-2e42835511e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 701.594218] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d1:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b158664-3a56-450a-8a96-2e42835511e3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.602953] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating folder: Project (a36ec823128647758ca8047a5ebf1ae1). Parent ref: group-v590848. 
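The two structures in the lines above map one-to-one: the Neutron network_info entry for port 5b158664-... is reduced to the small VIF-info dict that the VMware driver feeds into the VM's config spec. A sketch of that reduction (field names copied from the log; the function itself is illustrative, not Nova's code):

    def to_vif_info(vif, vif_model='vmxnet3'):
        return {
            'network_name': vif['network']['bridge'],                  # 'br-int'
            'mac_address': vif['address'],                             # 'fa:16:3e:08:d1:d9'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                                     # port UUID
            'vif_model': vif_model,
        }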
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.602953] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa054dd1-35a5-4e43-a11a-656d3ce3e857 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.615474] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created folder: Project (a36ec823128647758ca8047a5ebf1ae1) in parent group-v590848. [ 701.615695] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating folder: Instances. Parent ref: group-v590892. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 701.615963] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-170f54f1-7b1d-4c91-a1ca-c1e543c91caf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.627516] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created folder: Instances in parent group-v590892. [ 701.627820] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.628063] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.628622] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dfd1c72-3946-4f61-bfb5-05d1171faa80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.653143] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.653143] env[68437]: value = "task-2943713" [ 701.653143] env[68437]: _type = "Task" [ 701.653143] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.657130] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.663813] env[68437]: DEBUG nova.compute.manager [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Received event network-changed-a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 701.664330] env[68437]: DEBUG nova.compute.manager [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Refreshing instance network info cache due to event network-changed-a915dbf0-9e3f-41da-b43b-dd0a4225b839. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 701.664610] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Acquiring lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.664813] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Acquired lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.664910] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Refreshing network info cache for port a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 701.673864] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943713, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.775439] env[68437]: DEBUG nova.scheduler.client.report [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.887090] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943709, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.954528] env[68437]: INFO nova.compute.manager [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Took 18.82 seconds to build instance. 
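The network-vif-plugged / network-changed traffic above is Neutron notifying the compute service through external instance events: a spawn may register a waiter for "network-vif-plugged-<port-id>", and when no waiter is registered (as with the building instances here) the event is logged as unexpected and only refreshes the network info cache. A rough sketch of the waiter bookkeeping, not Nova's implementation:

    import threading

    class InstanceEventWaiters:
        def __init__(self):
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            # None means "No waiting events found dispatching <event>".
            return self._waiters.pop((instance_uuid, event_name), None)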
[ 702.074674] env[68437]: DEBUG oslo_vmware.api [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943710, 'name': PowerOffVM_Task, 'duration_secs': 0.247732} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.074674] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 702.074674] env[68437]: DEBUG nova.compute.manager [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 702.074674] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9b3329-dc6a-4e63-9c15-eabcab2b4b79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.131162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "ce8fd88b-249b-4fee-80fc-35b795d24658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.131162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.131162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.131162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.131361] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.136716] env[68437]: INFO nova.compute.manager [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Terminating instance [ 702.166488] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943713, 'name': CreateVM_Task, 'duration_secs': 0.427937} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.174127] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.174823] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.174984] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.175348] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 702.175907] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-803faade-f9a9-4375-b9f7-c3871c331098 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.181749] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 702.181749] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff5f18-a581-d69f-6730-270ee814c00b" [ 702.181749] env[68437]: _type = "Task" [ 702.181749] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.192889] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff5f18-a581-d69f-6730-270ee814c00b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.195385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.282755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.285291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.447s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.287255] env[68437]: INFO nova.compute.claims [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.306244] env[68437]: INFO nova.scheduler.client.report [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Deleted allocations for instance 180f77ab-e468-410d-8e41-20291487ef5d [ 702.383618] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57767} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.384026] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.384184] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.384765] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38a2cea4-c768-47b3-96e1-bc879e599153 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.394032] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 702.394032] env[68437]: value = "task-2943714" [ 702.394032] env[68437]: _type = "Task" [ 702.394032] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.403809] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.457987] env[68437]: DEBUG oslo_concurrency.lockutils [None req-87673bd0-d600-4ceb-8427-d4b1c5ec795a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.340s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.590204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0524ed5-c7ad-450a-8851-f625eab652e2 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.596803] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Updated VIF entry in instance network info cache for port a915dbf0-9e3f-41da-b43b-dd0a4225b839. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 702.597727] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Updating instance_info_cache with network_info: [{"id": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "address": "fa:16:3e:04:4e:a2", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa915dbf0-9e", "ovs_interfaceid": "a915dbf0-9e3f-41da-b43b-dd0a4225b839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.646026] env[68437]: DEBUG nova.compute.manager [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 702.646026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 702.646026] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d995adde-09fe-49b1-be5d-3efcee2b60df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.657184] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 702.657828] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dba984e2-e4cc-4ebc-87f3-42932f98a1c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.666967] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 702.666967] env[68437]: value = "task-2943715" [ 702.666967] env[68437]: _type = "Task" [ 702.666967] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.674248] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943715, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.693789] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff5f18-a581-d69f-6730-270ee814c00b, 'name': SearchDatastore_Task, 'duration_secs': 0.013571} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.694351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.694351] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.694620] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.694710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.694928] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.695215] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7200906-9479-4e49-bcbe-62b7845c37da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.705414] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.705542] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.706301] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68354f3-77a5-4e96-85db-8e5424bc57c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.711858] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 702.711858] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52da6faa-59e0-2fea-fe30-395d88710faf" [ 702.711858] env[68437]: _type = "Task" [ 702.711858] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.720207] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52da6faa-59e0-2fea-fe30-395d88710faf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.810429] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.810573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.818036] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a487d09-0fd0-4440-8964-da5a6327cd88 tempest-ServerDiagnosticsNegativeTest-1892603893 tempest-ServerDiagnosticsNegativeTest-1892603893-project-member] Lock "180f77ab-e468-410d-8e41-20291487ef5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.815s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.859180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.859180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.903064] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087528} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.903824] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 702.904708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34d268e-68bc-4823-a35b-2ae84de8b514 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.928039] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 702.929607] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-459c4e44-d3df-4335-a6e6-be3817e1c6de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.952093] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 702.952093] env[68437]: value = "task-2943716" [ 702.952093] env[68437]: _type = "Task" [ 702.952093] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.961727] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 703.100697] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Releasing lock "refresh_cache-2f368262-0825-4ccc-9b1e-523b705bcfce" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.101839] env[68437]: DEBUG nova.compute.manager [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Received event network-changed-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 703.102298] env[68437]: DEBUG nova.compute.manager [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Refreshing instance network info cache due to event network-changed-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 703.106133] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Acquiring lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.106133] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Acquired lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.106133] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Refreshing network info cache for port cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 703.180324] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943715, 'name': PowerOffVM_Task, 'duration_secs': 0.446131} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.180605] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.180793] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.181069] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d7c41eb-0317-4821-8a11-06a145ef1381 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.225904] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52da6faa-59e0-2fea-fe30-395d88710faf, 'name': SearchDatastore_Task, 'duration_secs': 0.0164} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.227312] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592afc5c-ce59-4f69-9c63-7226fc53ae69 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.235724] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 703.235724] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52084e57-4ed0-b42f-8cd2-2c71f5f03264" [ 703.235724] env[68437]: _type = "Task" [ 703.235724] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.246311] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 703.246311] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 703.246311] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleting the datastore file [datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.246764] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-192b5183-ac5a-40da-9779-6d94ba4d3c36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.252633] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52084e57-4ed0-b42f-8cd2-2c71f5f03264, 'name': SearchDatastore_Task, 'duration_secs': 0.014034} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.252633] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.252790] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.252974] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ae40fc1-d415-4a0d-af10-6db9ad876fe7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.256522] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for the task: (returnval){ [ 703.256522] env[68437]: value = "task-2943718" [ 703.256522] env[68437]: _type = "Task" [ 703.256522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.261412] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 703.261412] env[68437]: value = "task-2943719" [ 703.261412] env[68437]: _type = "Task" [ 703.261412] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.267893] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943718, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.272124] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943719, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.466804] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943716, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.489084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.744261] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe20edf9-0492-43bf-b078-66619999b40c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.755588] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217607b2-39da-4c15-88e8-6c8987a3428b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.810042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb482a4-02d2-41b8-9b1b-4aaa08d9906b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.812886] env[68437]: DEBUG oslo_vmware.api [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Task: {'id': task-2943718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.313422} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.813219] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943719, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.813587] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 703.813844] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 703.814117] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 703.814364] env[68437]: INFO nova.compute.manager [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Took 1.17 seconds to destroy the instance on the hypervisor. 
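The teardown of instance ce8fd88b-... above follows the ordering visible in the log: power off the VM (PowerOffVM_Task), unregister it from vCenter, delete its datastore directory (DeleteDatastoreFile_Task), and only afterwards hand back to the compute manager for network deallocation. The sketch below schematically mirrors that ordering; the FakeVMwareSession class and its methods are hypothetical placeholders, not the actual nova.virt.vmwareapi code.

```python
import logging
import time

LOG = logging.getLogger(__name__)


class FakeVMwareSession:
    """Hypothetical stand-in for a vCenter session; each method only logs the
    step that the real driver performs via the vSphere API."""

    def power_off(self, vm_ref):
        LOG.debug("Powered off the VM %s", vm_ref)          # PowerOffVM_Task

    def unregister(self, vm_ref):
        LOG.debug("Unregistered the VM %s", vm_ref)         # UnregisterVM

    def delete_datastore_dir(self, path):
        LOG.debug("Deleted the datastore file %s", path)    # DeleteDatastoreFile_Task


def destroy_instance(session, vm_ref, datastore_path):
    """Mirror the teardown ordering seen in the log: power off, unregister,
    then delete the instance's datastore contents."""
    started = time.monotonic()
    session.power_off(vm_ref)
    session.unregister(vm_ref)
    session.delete_datastore_dir(datastore_path)
    LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
             time.monotonic() - started)
    # Network deallocation happens afterwards in the compute manager,
    # not in the virt driver, as the subsequent log entries show.


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(FakeVMwareSession(),
                     "vm-ce8fd88b",
                     "[datastore2] ce8fd88b-249b-4fee-80fc-35b795d24658")
```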
[ 703.815065] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 703.816294] env[68437]: DEBUG nova.compute.manager [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 703.816496] env[68437]: DEBUG nova.network.neutron [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 703.823013] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f05c813-cd69-4e62-91d3-fc19f6e67bb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.839231] env[68437]: DEBUG nova.compute.provider_tree [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.963958] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943716, 'name': ReconfigVM_Task, 'duration_secs': 0.675952} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.964277] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.964931] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f3c34a5-e93b-471d-ac91-d9a182d70ac5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.972279] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 703.972279] env[68437]: value = "task-2943720" [ 703.972279] env[68437]: _type = "Task" [ 703.972279] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.985116] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943720, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.095062] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updated VIF entry in instance network info cache for port cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 704.095062] env[68437]: DEBUG nova.network.neutron [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating instance_info_cache with network_info: [{"id": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "address": "fa:16:3e:72:d6:46", "network": {"id": "06eb8d2b-d1b9-4dee-ada5-4dbf2f3a3f1e", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1510705003-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "518f367d97ad43bb9653dc4a5137e1bb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbc16d3e-2e", "ovs_interfaceid": "cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.102142] env[68437]: DEBUG nova.compute.manager [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Received event network-changed-5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 704.103454] env[68437]: DEBUG nova.compute.manager [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Refreshing instance network info cache due to event network-changed-5b158664-3a56-450a-8a96-2e42835511e3. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 704.104295] env[68437]: DEBUG oslo_concurrency.lockutils [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] Acquiring lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.104843] env[68437]: DEBUG oslo_concurrency.lockutils [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] Acquired lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.105444] env[68437]: DEBUG nova.network.neutron [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Refreshing network info cache for port 5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 704.275213] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943719, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692053} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.275757] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.276277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.276687] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e7d9ec1-5da1-4f6b-9523-740e54ab06e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.291035] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 704.291035] env[68437]: value = "task-2943721" [ 704.291035] env[68437]: _type = "Task" [ 704.291035] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.297800] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943721, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.346352] env[68437]: DEBUG nova.scheduler.client.report [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.482261] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943720, 'name': Rename_Task, 'duration_secs': 0.183809} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.482537] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.482876] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4533fe6-2097-44b0-ac91-17db189c57b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.489621] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 704.489621] env[68437]: value = "task-2943722" [ 704.489621] env[68437]: _type = "Task" [ 704.489621] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.499473] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943722, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.602118] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bc885c8-110f-4990-b5ef-a1929fb73437 req-9497bfba-43ec-4332-ad61-0c6436305d5a service nova] Releasing lock "refresh_cache-ea330078-a8f2-41f4-a161-5d0e29ddfab5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.696510] env[68437]: DEBUG nova.network.neutron [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.788780] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.789702] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.790449] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.790674] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.790887] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.797923] env[68437]: INFO nova.compute.manager [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Terminating instance [ 704.808981] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085054} 
completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.809988] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 704.811458] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c6ac47-793f-490f-8fa0-36bb502ca164 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.842147] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 704.842147] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85f933d3-2b32-4fc6-be20-c45d0dd716e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.860642] env[68437]: DEBUG nova.network.neutron [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Updated VIF entry in instance network info cache for port 5b158664-3a56-450a-8a96-2e42835511e3. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 704.864023] env[68437]: DEBUG nova.network.neutron [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Updating instance_info_cache with network_info: [{"id": "5b158664-3a56-450a-8a96-2e42835511e3", "address": "fa:16:3e:08:d1:d9", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b158664-3a", "ovs_interfaceid": "5b158664-3a56-450a-8a96-2e42835511e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.864023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.864222] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 704.866349] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.825s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.866549] env[68437]: DEBUG nova.objects.instance [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lazy-loading 'resources' on Instance uuid ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 704.872585] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 704.872585] env[68437]: value = "task-2943723" [ 704.872585] env[68437]: _type = "Task" [ 704.872585] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.882548] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943723, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.000922] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943722, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.200332] env[68437]: INFO nova.compute.manager [-] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Took 1.38 seconds to deallocate network for instance. [ 705.234380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.235269] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.310324] env[68437]: DEBUG nova.compute.manager [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 705.312422] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.312422] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063e9269-1b3b-4c1c-bfe9-86d9a277af9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.324720] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 705.324720] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-288496b1-2c58-4871-a9a5-464d9a09d613 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.335030] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 705.335030] env[68437]: value = "task-2943724" [ 705.335030] env[68437]: _type = "Task" [ 705.335030] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.342831] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.370070] env[68437]: DEBUG oslo_concurrency.lockutils [req-16feef23-c2c5-418d-acb2-1c3bfdad7c54 req-6d335d5b-7b28-4320-afb4-77ec62b5df23 service nova] Releasing lock "refresh_cache-f517b14c-320f-4a6e-ae74-f2335e22f7a4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.370070] env[68437]: DEBUG nova.compute.utils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 705.373498] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.373875] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 705.388311] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943723, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.475457] env[68437]: DEBUG nova.policy [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dda8b15280c4d2282d4dc88aa3d607d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63985eb5e5fb47958fd673bd0ce73f2d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 705.486767] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7ada6f-710d-41c6-98a1-43468c751ee4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.500275] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Suspending the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 705.501864] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-75e93d24-2f0f-4c55-9359-d9bdfb6d99da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.508024] env[68437]: DEBUG oslo_vmware.api [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943722, 'name': PowerOnVM_Task, 'duration_secs': 0.935546} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.508024] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.508312] env[68437]: INFO nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Took 10.54 seconds to spawn the instance on the hypervisor. 
[ 705.508533] env[68437]: DEBUG nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.509633] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73c8dbe-bea7-498e-8fa7-f4a8846e2a31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.515949] env[68437]: DEBUG oslo_vmware.api [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] Waiting for the task: (returnval){ [ 705.515949] env[68437]: value = "task-2943725" [ 705.515949] env[68437]: _type = "Task" [ 705.515949] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.530534] env[68437]: DEBUG oslo_vmware.api [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] Task: {'id': task-2943725, 'name': SuspendVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.709630] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.844477] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943724, 'name': PowerOffVM_Task, 'duration_secs': 0.317998} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.844959] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 705.847457] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 705.847457] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9ff77a4-816c-4131-93b3-b689670e74e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.852112] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f841da21-dcec-4489-96df-4a27072109b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.861729] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d4f4c1-731a-470f-b576-0bff69a14771 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.903020] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.910068] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a98d78c-1eee-4f22-8db5-b7b470412833 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.914113] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 705.914223] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 705.914395] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Deleting the datastore file [datastore1] 1537e626-f2ec-4b5d-bcba-50cd583dff31 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 705.915765] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-361ac5f8-92bd-4d30-a2ae-80d67495a14a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.923973] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943723, 'name': ReconfigVM_Task, 'duration_secs': 0.610125} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.924797] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Reconfigured VM instance instance-00000010 to attach disk [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.929808] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b456f45-72e7-4194-baeb-adda61e48023 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.936244] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3270a78c-9f28-413c-9964-a7330e3961bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.938618] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for the task: (returnval){ [ 705.938618] env[68437]: value = "task-2943727" [ 705.938618] env[68437]: _type = "Task" [ 705.938618] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.952057] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 705.952057] env[68437]: value = "task-2943728" [ 705.952057] env[68437]: _type = "Task" [ 705.952057] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.953405] env[68437]: DEBUG nova.compute.provider_tree [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.962423] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943727, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.966678] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943728, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.974572] env[68437]: DEBUG nova.compute.manager [req-9fd63f4e-d69c-4d3d-a569-ef19e2261bfc req-d4298b6a-1c21-4a73-8501-8036c2505759 service nova] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Received event network-vif-deleted-9f47f745-6f86-4d20-929b-376aeda67a6e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 706.035395] env[68437]: DEBUG oslo_vmware.api [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] Task: {'id': task-2943725, 'name': SuspendVM_Task} progress is 62%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.041285] env[68437]: INFO nova.compute.manager [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Took 22.15 seconds to build instance. [ 706.111247] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Successfully created port: 77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.458056] env[68437]: DEBUG nova.scheduler.client.report [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 706.460624] env[68437]: DEBUG oslo_vmware.api [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Task: {'id': task-2943727, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179012} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.467807] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 706.468084] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 706.468367] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.468529] env[68437]: INFO nova.compute.manager [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Took 1.16 seconds to destroy the instance on the hypervisor. [ 706.468803] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.469047] env[68437]: DEBUG nova.compute.manager [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 706.469250] env[68437]: DEBUG nova.network.neutron [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 706.481105] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943728, 'name': Rename_Task, 'duration_secs': 0.206389} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.482147] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.482398] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e520dd2-9299-4146-86e6-855322a047f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.491241] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 706.491241] env[68437]: value = "task-2943729" [ 706.491241] env[68437]: _type = "Task" [ 706.491241] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.502018] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.528793] env[68437]: DEBUG oslo_vmware.api [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] Task: {'id': task-2943725, 'name': SuspendVM_Task, 'duration_secs': 0.691611} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.529081] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Suspended the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 706.529271] env[68437]: DEBUG nova.compute.manager [None req-a0ca14ad-0bbd-4a7a-920f-cfe3bf1ed36e tempest-ServersAdminNegativeTestJSON-115865355 tempest-ServersAdminNegativeTestJSON-115865355-project-admin] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.530201] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd750b0c-3e44-4808-ac7f-343980a9dfd7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.544171] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba55506-aee2-4f0d-bc1b-8ba17e9c9eb4 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.662s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.562824] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.563428] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.916019] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 706.949767] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.949767] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.949767] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.950088] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.950088] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.950088] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.950088] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.950088] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.950271] env[68437]: DEBUG 
nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.950394] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.950841] env[68437]: DEBUG nova.virt.hardware [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.952155] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515787cb-6b49-416c-b2a2-90ca70ad8993 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.965671] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1faf475-92e7-4e06-a16e-8633ce5b2c49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.974506] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.977229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.301s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.979835] env[68437]: INFO nova.compute.claims [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.003915] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943729, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.005524] env[68437]: INFO nova.scheduler.client.report [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Deleted allocations for instance ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236 [ 707.047301] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.509971] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943729, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.514259] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3fc48608-013f-43e4-a90b-5e73153789aa tempest-ServerDiagnosticsTest-1341645442 tempest-ServerDiagnosticsTest-1341645442-project-member] Lock "ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.024s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.561787] env[68437]: DEBUG nova.network.neutron [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.580775] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.672467] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Successfully updated port: 77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.005948] env[68437]: DEBUG oslo_vmware.api [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943729, 'name': PowerOnVM_Task, 'duration_secs': 1.239276} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.006336] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 708.006486] env[68437]: INFO nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Took 10.43 seconds to spawn the instance on the hypervisor. [ 708.006696] env[68437]: DEBUG nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.007551] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfb7680-b6c1-4bdf-b99e-68e921617274 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.064628] env[68437]: INFO nova.compute.manager [-] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Took 1.60 seconds to deallocate network for instance. [ 708.183553] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.183553] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.183553] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 708.262507] env[68437]: DEBUG nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.263816] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657d4489-b8e2-4279-8b50-e35f116bae80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.412267] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3e01b1-6127-4f1c-92fa-ee5428c76fa8 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.421685] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f3d6a3-29e8-46a7-b3fb-36e2facb34eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.458127] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80622446-64c5-4a24-9b1c-5c4830cb39ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.467883] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b898b6-5af0-4349-b48c-df403202da18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.485420] env[68437]: DEBUG nova.compute.provider_tree [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.534657] env[68437]: INFO nova.compute.manager [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Took 23.56 seconds to build instance. [ 708.574370] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.719174] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 708.789160] env[68437]: INFO nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] instance snapshotting [ 708.789160] env[68437]: WARNING nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 708.791771] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1263b3-984a-4a43-b0de-ac192d172e11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.815591] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c8751e-2e77-461a-9c01-c0666c81dcdd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.952723] env[68437]: DEBUG nova.compute.manager [req-10e87352-f3ec-44c0-aca2-489e133acc1a req-eb345e1e-3d75-48a1-8364-fdfb10c13966 service nova] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Received event network-vif-deleted-9d6a8621-9098-4af3-9f1b-1579862d0ca3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 708.963336] env[68437]: DEBUG nova.network.neutron [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating instance_info_cache with network_info: [{"id": "77d0b04c-ecff-4b2e-a001-7248da043b47", "address": "fa:16:3e:2e:b7:05", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77d0b04c-ec", "ovs_interfaceid": "77d0b04c-ecff-4b2e-a001-7248da043b47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.988850] env[68437]: DEBUG nova.scheduler.client.report [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 709.036509] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d7670765-5a8f-458a-8b56-931f3085d4a2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.113s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.327361] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 709.329167] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-adb82e5c-f2d9-4c80-b9e7-adbbc5597e12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.339773] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 709.339773] env[68437]: value = "task-2943730" [ 709.339773] env[68437]: _type = "Task" [ 709.339773] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.349728] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943730, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.469125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.469125] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Instance network_info: |[{"id": "77d0b04c-ecff-4b2e-a001-7248da043b47", "address": "fa:16:3e:2e:b7:05", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77d0b04c-ec", "ovs_interfaceid": "77d0b04c-ecff-4b2e-a001-7248da043b47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 709.469290] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:b7:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77d0b04c-ecff-4b2e-a001-7248da043b47', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.478023] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Creating folder: Project (63985eb5e5fb47958fd673bd0ce73f2d). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.478715] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26e219c5-b481-43fd-b050-bcded0c8b0a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.490278] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Created folder: Project (63985eb5e5fb47958fd673bd0ce73f2d) in parent group-v590848. [ 709.490278] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Creating folder: Instances. Parent ref: group-v590895. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.490278] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fad45592-4584-4c52-9784-5238be7a599c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.496666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.499423] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 709.503801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.579s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.506088] env[68437]: INFO nova.compute.claims [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.511236] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Created folder: Instances in parent group-v590895. [ 709.511236] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 709.511236] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.511236] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e2786a4-230a-43bd-af8e-48226c3e8e7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.539926] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.544864] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.544864] env[68437]: value = "task-2943733" [ 709.544864] env[68437]: _type = "Task" [ 709.544864] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.556355] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943733, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.752196] env[68437]: DEBUG nova.compute.manager [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Received event network-vif-plugged-77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 709.752329] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.752794] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.752794] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.752794] env[68437]: DEBUG nova.compute.manager [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] No waiting events found dispatching network-vif-plugged-77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.752996] env[68437]: WARNING nova.compute.manager [req-ba515191-8a04-49eb-81f0-d587af120645 
req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Received unexpected event network-vif-plugged-77d0b04c-ecff-4b2e-a001-7248da043b47 for instance with vm_state building and task_state spawning. [ 709.753180] env[68437]: DEBUG nova.compute.manager [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Received event network-changed-77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 709.753485] env[68437]: DEBUG nova.compute.manager [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Refreshing instance network info cache due to event network-changed-77d0b04c-ecff-4b2e-a001-7248da043b47. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 709.753531] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Acquiring lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.755300] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Acquired lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.755300] env[68437]: DEBUG nova.network.neutron [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Refreshing network info cache for port 77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 709.851575] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943730, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.857029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.857237] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.005098] env[68437]: DEBUG nova.compute.utils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 710.007619] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 710.008084] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 710.067308] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.067721] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.069924] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943733, 'name': CreateVM_Task, 'duration_secs': 0.311794} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.072026] env[68437]: DEBUG nova.policy [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8a11819f91e486b86a9cc41c1fd7ec5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b002244273f41d89ddf47570ffe6a02', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 710.073735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.074561] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.074722] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.075049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.079746] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da9634d5-149b-4c23-9975-29fdca4f8a59 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.082684] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.086608] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 710.086608] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523b4e74-89ef-6ffe-2d98-a599f19666bd" [ 710.086608] env[68437]: _type = "Task" [ 710.086608] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.096321] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523b4e74-89ef-6ffe-2d98-a599f19666bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.351648] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943730, 'name': CreateSnapshot_Task, 'duration_secs': 0.748842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.351973] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 710.352947] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fc18a1-72b9-4d08-9ebc-9e2aad433560 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.508505] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 710.600557] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523b4e74-89ef-6ffe-2d98-a599f19666bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009479} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.600886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.601140] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.601381] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.601523] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.601697] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.601962] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffa2a136-051c-47a1-8775-1be514d71381 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.613126] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.613126] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.613867] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6067e08c-fe86-4e79-bba7-4fe06b7cd55a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.618958] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 710.618958] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52aa6b2b-e71d-9390-ce83-4b6e991a71d5" [ 710.618958] env[68437]: _type = "Task" [ 710.618958] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.631733] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52aa6b2b-e71d-9390-ce83-4b6e991a71d5, 'name': SearchDatastore_Task, 'duration_secs': 0.008348} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.635024] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d125897-91f0-4c15-98a2-05c4b5b94a2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.639978] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 710.639978] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52512ff3-3e94-a130-a731-bc7cc81752da" [ 710.639978] env[68437]: _type = "Task" [ 710.639978] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.650215] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52512ff3-3e94-a130-a731-bc7cc81752da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.804758] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Successfully created port: 020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.873617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 710.876912] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2096809c-c42e-46b0-a87a-c6de4587d03b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.889875] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 710.889875] env[68437]: value = "task-2943734" [ 710.889875] env[68437]: _type = "Task" [ 710.889875] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.901015] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.917879] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6557e002-4858-479c-bd4a-1997ba1b156d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.925856] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e758c8-a2c6-4eaa-9926-ccd3dcf50b8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.960864] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8957b1e-ee00-496f-9bc8-56e904c28361 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.971603] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5238ce3f-839a-4f1e-8dbf-c56f1570e113 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.987836] env[68437]: DEBUG nova.compute.provider_tree [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.152782] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52512ff3-3e94-a130-a731-bc7cc81752da, 'name': SearchDatastore_Task, 'duration_secs': 0.008245} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.152782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.152782] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf394b0b-cb14-4ae1-81bb-622c951bfdab/cf394b0b-cb14-4ae1-81bb-622c951bfdab.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.152782] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22efaab1-3775-4e9d-8a36-3b8ac92da1df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.160209] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 711.160209] env[68437]: value = "task-2943735" [ 711.160209] env[68437]: _type = "Task" [ 711.160209] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.168899] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943735, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.183352] env[68437]: DEBUG nova.network.neutron [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updated VIF entry in instance network info cache for port 77d0b04c-ecff-4b2e-a001-7248da043b47. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 711.183733] env[68437]: DEBUG nova.network.neutron [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating instance_info_cache with network_info: [{"id": "77d0b04c-ecff-4b2e-a001-7248da043b47", "address": "fa:16:3e:2e:b7:05", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77d0b04c-ec", "ovs_interfaceid": "77d0b04c-ecff-4b2e-a001-7248da043b47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.405738] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.492336] env[68437]: DEBUG nova.scheduler.client.report [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 711.517584] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 711.569167] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 711.569167] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.569344] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.569390] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.570370] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.570370] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 711.570370] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 711.570370] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 711.570370] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 711.570627] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 711.570732] env[68437]: DEBUG nova.virt.hardware [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 711.571838] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a91ef42-0072-4ae8-8412-acbc743905f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.583511] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18864238-429e-4902-955c-dcac3ba2eff6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.675129] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943735, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473718} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.675129] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf394b0b-cb14-4ae1-81bb-622c951bfdab/cf394b0b-cb14-4ae1-81bb-622c951bfdab.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 711.675129] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 711.675630] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ea4f3bb-0001-48a3-b167-050054aa1cd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.684844] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 711.684844] env[68437]: value = "task-2943736" [ 711.684844] env[68437]: _type = "Task" [ 711.684844] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.693451] env[68437]: DEBUG oslo_concurrency.lockutils [req-ba515191-8a04-49eb-81f0-d587af120645 req-fbcf8633-ce8e-4cd2-a43f-1076c10974df service nova] Releasing lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.700810] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.740764] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "f1230046-d368-40ee-b1fa-99df4ab15a10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.741293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.903388] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.000575] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.001161] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 712.003678] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.790s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.005097] env[68437]: INFO nova.compute.claims [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.194626] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074477} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.194956] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.196256] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186e66d1-6dea-48cb-8d76-894606d56a10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.225108] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] cf394b0b-cb14-4ae1-81bb-622c951bfdab/cf394b0b-cb14-4ae1-81bb-622c951bfdab.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.225484] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-998e1e89-39a5-409f-898b-68747d65eca2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.246032] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 712.246032] env[68437]: value = "task-2943737" [ 712.246032] env[68437]: _type = "Task" [ 712.246032] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.254825] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943737, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.403144] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.518036] env[68437]: DEBUG nova.compute.utils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 712.523254] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 712.523254] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 712.610638] env[68437]: DEBUG nova.policy [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8f30aea8b2b4ea1a6eb7d30875a4c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '188b948736f44dfa8dd9aeb258180c58', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.644767] env[68437]: INFO nova.compute.manager [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Rebuilding instance [ 712.706515] env[68437]: DEBUG nova.compute.manager [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 712.710271] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a5ce41-08c4-47ed-8e8c-99ceffc4ce80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.761949] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943737, 'name': ReconfigVM_Task, 'duration_secs': 0.324337} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.762339] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfigured VM instance instance-00000011 to attach disk [datastore2] cf394b0b-cb14-4ae1-81bb-622c951bfdab/cf394b0b-cb14-4ae1-81bb-622c951bfdab.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.763033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19204a61-e72c-4581-8227-aa3faa7f36f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.771581] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 712.771581] env[68437]: value = "task-2943738" [ 712.771581] env[68437]: _type = "Task" [ 712.771581] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.782315] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943738, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.856301] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Successfully updated port: 020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 712.906815] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.026693] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 713.146786] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Successfully created port: b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.186945] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "e3855111-7678-42c5-a37e-25e8587416aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.187203] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.285904] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943738, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.359173] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.359343] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.360250] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 713.407848] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943734, 'name': CloneVM_Task, 'duration_secs': 2.105292} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.411244] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Created linked-clone VM from snapshot [ 713.412193] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8379f4-68e8-4cbb-8a50-81a205874497 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.420599] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Uploading image d893745e-cd23-4869-b546-69f9b690cc74 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 713.461724] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 713.461724] env[68437]: value = "vm-590899" [ 713.461724] env[68437]: _type = "VirtualMachine" [ 713.461724] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 713.462311] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f3c6b148-c0a4-42f0-8d04-bc64d65301f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.474593] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease: (returnval){ [ 713.474593] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017e8c-3fd6-3fc0-b6c9-81c5298a71d1" [ 713.474593] env[68437]: _type = "HttpNfcLease" [ 713.474593] env[68437]: } obtained for exporting VM: (result){ [ 713.474593] env[68437]: value = "vm-590899" [ 713.474593] env[68437]: _type = "VirtualMachine" [ 713.474593] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 713.476449] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the lease: (returnval){ [ 713.476449] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017e8c-3fd6-3fc0-b6c9-81c5298a71d1" [ 713.476449] env[68437]: _type = "HttpNfcLease" [ 713.476449] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 713.484370] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 713.484370] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017e8c-3fd6-3fc0-b6c9-81c5298a71d1" [ 713.484370] env[68437]: _type = "HttpNfcLease" [ 713.484370] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 713.626576] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accf3c45-c9b8-458a-a0c4-6cccc7e360a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.635255] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70b33c1-4939-4471-a795-1523a8aee22d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.670874] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7240dc02-901d-4950-883c-743d9103c7de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.679054] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d1504-3a65-4bee-bb4e-593250437efe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.697341] env[68437]: DEBUG nova.compute.provider_tree [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.734512] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.734810] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4173237-fc43-407f-a461-d910cdfc02ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.741538] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 713.741538] env[68437]: value = "task-2943740" [ 713.741538] env[68437]: _type = "Task" [ 713.741538] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.750887] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.784647] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943738, 'name': Rename_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.901517] env[68437]: DEBUG nova.compute.manager [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Received event network-vif-plugged-020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 713.901778] env[68437]: DEBUG oslo_concurrency.lockutils [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] Acquiring lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.903389] env[68437]: DEBUG oslo_concurrency.lockutils [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.903389] env[68437]: DEBUG oslo_concurrency.lockutils [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.903389] env[68437]: DEBUG nova.compute.manager [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] No waiting events found dispatching network-vif-plugged-020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 713.903389] env[68437]: WARNING nova.compute.manager [req-443d0af8-3092-4d21-9798-3baa53e44a3c req-28ffcd0a-de15-452e-9b23-c04bc47d51c7 service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Received unexpected event network-vif-plugged-020f4f15-f02d-4a17-a872-71d79b1ea226 for instance with vm_state building and task_state spawning. [ 713.908146] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 713.984150] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 713.984150] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017e8c-3fd6-3fc0-b6c9-81c5298a71d1" [ 713.984150] env[68437]: _type = "HttpNfcLease" [ 713.984150] env[68437]: } is ready. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 713.985081] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 713.985081] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52017e8c-3fd6-3fc0-b6c9-81c5298a71d1" [ 713.985081] env[68437]: _type = "HttpNfcLease" [ 713.985081] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 713.986174] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd851ba-6699-4d13-9561-388d419e3d24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.000090] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 714.002024] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 714.063025] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 714.093310] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 714.093708] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.094217] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 714.094471] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.095286] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 714.095286] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 714.095286] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 714.095286] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 714.095509] env[68437]: DEBUG nova.virt.hardware [None 
req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 714.095763] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 714.095892] env[68437]: DEBUG nova.virt.hardware [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 714.096938] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e9061d-d9d7-4dda-b861-bfc6b741b776 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.107511] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddba5b23-55d6-4579-990c-e53f7bf8d54b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.123303] env[68437]: DEBUG nova.network.neutron [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Updating instance_info_cache with network_info: [{"id": "020f4f15-f02d-4a17-a872-71d79b1ea226", "address": "fa:16:3e:7c:95:78", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap020f4f15-f0", "ovs_interfaceid": "020f4f15-f02d-4a17-a872-71d79b1ea226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.128680] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-882b1d51-f413-4454-bfb3-db4a858601d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.200798] env[68437]: DEBUG nova.scheduler.client.report [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 
tempest-ServersAdmin275Test-2055587085-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 714.256666] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943740, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.289208] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943738, 'name': Rename_Task, 'duration_secs': 1.147537} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.289208] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.289208] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77effbc5-c535-40cc-a5b7-690c15dc588b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.299345] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 714.299345] env[68437]: value = "task-2943741" [ 714.299345] env[68437]: _type = "Task" [ 714.299345] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.311522] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943741, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.628778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.629295] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Instance network_info: |[{"id": "020f4f15-f02d-4a17-a872-71d79b1ea226", "address": "fa:16:3e:7c:95:78", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap020f4f15-f0", "ovs_interfaceid": "020f4f15-f02d-4a17-a872-71d79b1ea226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 714.630561] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:95:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '020f4f15-f02d-4a17-a872-71d79b1ea226', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.641570] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.642138] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 714.646363] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b06975a4-54a9-4ab4-ae7b-6670a7c527cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.675493] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.675493] env[68437]: value = "task-2943742" [ 714.675493] env[68437]: _type = "Task" [ 714.675493] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.686015] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943742, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.709018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.710105] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 714.715108] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.824s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.718046] env[68437]: INFO nova.compute.claims [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.755902] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943740, 'name': PowerOffVM_Task, 'duration_secs': 0.541164} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.756285] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 714.756937] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.760802] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15bfc08-c345-4fd2-85f0-be1dda8e4696 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.769478] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 714.769888] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-188ef951-8781-44da-9b80-580a4e39dd52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.809061] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943741, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.836411] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 714.844659] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 714.844659] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 714.844659] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34990889-ca15-4ef8-9b51-2f6db4f283e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.848064] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 714.848064] env[68437]: value = "task-2943744" [ 714.848064] env[68437]: _type = "Task" [ 714.848064] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.861328] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.119785] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "39c532b1-b05e-4354-ad8f-9223b06e9488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.121198] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.186968] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943742, 'name': CreateVM_Task, 'duration_secs': 0.385561} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.187571] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.188342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.188589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.188872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.189184] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d263eb78-9d8a-41e2-8262-2699b025532d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.201212] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 715.201212] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52715fab-7c96-4058-dcd8-8924e0b58d80" [ 715.201212] env[68437]: _type = "Task" [ 715.201212] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.213248] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52715fab-7c96-4058-dcd8-8924e0b58d80, 'name': SearchDatastore_Task, 'duration_secs': 0.009788} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.214427] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.214427] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.214964] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.215533] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.216234] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.218762] env[68437]: DEBUG nova.compute.utils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 715.220209] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dcbb765-587e-475a-bdb6-1b7544080477 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.224777] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Not allocating networking since 'none' was specified. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 715.239265] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.239485] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.241159] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97ccaccc-bfa1-4d26-81c7-b66e2938570a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.246815] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 715.246815] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52291aa8-2daa-8987-7884-8dbe442e582a" [ 715.246815] env[68437]: _type = "Task" [ 715.246815] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.260700] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52291aa8-2daa-8987-7884-8dbe442e582a, 'name': SearchDatastore_Task, 'duration_secs': 0.009645} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.261878] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-351eb861-7d87-4ba8-a87a-1f861f5e9cfa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.268169] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 715.268169] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5266124f-693c-0582-4ef6-7ef998ae5a50" [ 715.268169] env[68437]: _type = "Task" [ 715.268169] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.276696] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266124f-693c-0582-4ef6-7ef998ae5a50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.311436] env[68437]: DEBUG oslo_vmware.api [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2943741, 'name': PowerOnVM_Task, 'duration_secs': 0.729009} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.311828] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.312671] env[68437]: INFO nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Took 8.40 seconds to spawn the instance on the hypervisor. [ 715.312671] env[68437]: DEBUG nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 715.313833] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4768a1-5860-49a0-8ee0-d1be717d17f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.364244] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219656} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.365054] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.365054] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.365823] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.709565] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Successfully updated port: b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 715.726407] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 715.790663] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266124f-693c-0582-4ef6-7ef998ae5a50, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.792564] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.793090] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/cf691a81-60e3-40ed-ba80-8f481ff2554b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 715.793531] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1b11162-9e55-4acb-a84c-88afd3bf4d39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.805993] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 715.805993] env[68437]: value = "task-2943745" [ 715.805993] env[68437]: _type = "Task" [ 715.805993] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.819804] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.835503] env[68437]: INFO nova.compute.manager [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Took 26.02 seconds to build instance. 
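The CopyVirtualDisk_Task invocation above follows the same invoke-then-poll pattern that recurs throughout this trace (PowerOffVM_Task, PowerOnVM_Task, CreateVM_Task, DeleteDatastoreFile_Task): the driver invokes an asynchronous vSphere *_Task method through the oslo.vmware session, gets a Task managed-object reference back immediately, and then waits on it, which produces the "progress is 0%/77%/100%" poll lines. The following is a minimal sketch of that pattern, not Nova's actual helper; it assumes `session` is an established oslo_vmware.api.VMwareAPISession, `dc_ref` is a Datacenter moref, and the datastore paths are placeholders.

# Sketch only: illustrates the invoke-then-poll pattern seen in the log above.
# Assumes `session` is an oslo_vmware.api.VMwareAPISession and `dc_ref` is a
# Datacenter managed-object reference obtained elsewhere.
def copy_cached_image(session, dc_ref, source_ds_path, dest_ds_path):
    """Copy a cached image VMDK to an instance directory and wait for it."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    # CopyVirtualDisk_Task returns a Task moref immediately.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=source_ds_path,   # e.g. "[datastore2] devstack-image-cache_base/<image>.vmdk"
        sourceDatacenter=dc_ref,
        destName=dest_ds_path)       # e.g. "[datastore2] <instance-uuid>/<instance-uuid>.vmdk"
    # wait_for_task() polls the task (the "progress is ..." lines above) and
    # raises if the task finishes in an error state.
    session.wait_for_task(task)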
[ 715.958397] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.958653] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.213723] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.213923] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.214167] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 716.320752] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943745, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.340535] env[68437]: DEBUG oslo_concurrency.lockutils [None req-344aff54-3ed7-4be8-9ca1-d0a0acb395a6 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.605s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.364679] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdbc61b-d492-475a-aab9-46c75b69f8e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.374518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b5f571-1616-4c86-a6fa-28d56b36fe5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.413917] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6991ab94-5fe7-44d0-9f16-ac6d67a7e081 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.422442] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a96c2f-6b02-4d03-bd8b-62288aac2d98 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.428479] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 716.428727] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.428887] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 716.429079] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 
tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.429249] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 716.429368] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 716.429577] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 716.429731] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 716.429904] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 716.430112] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 716.430367] env[68437]: DEBUG nova.virt.hardware [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 716.431496] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bdfa51-c164-4b43-8180-5fbfe6057878 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.446146] env[68437]: DEBUG nova.compute.provider_tree [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.451297] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2787553-224e-4d6e-aebb-c2f728a11105 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.469626] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d1:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b158664-3a56-450a-8a96-2e42835511e3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.477015] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.477838] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.478060] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6dd9ffa0-5622-4c18-97ff-10020f765bdd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.498619] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.498619] env[68437]: value = "task-2943746" [ 716.498619] env[68437]: _type = "Task" [ 716.498619] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.506582] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943746, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.559246] env[68437]: DEBUG nova.compute.manager [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Received event network-vif-plugged-b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 716.559508] env[68437]: DEBUG oslo_concurrency.lockutils [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] Acquiring lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.559820] env[68437]: DEBUG oslo_concurrency.lockutils [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] Lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.559964] env[68437]: DEBUG oslo_concurrency.lockutils [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] Lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.560078] env[68437]: DEBUG nova.compute.manager [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] No waiting events found dispatching network-vif-plugged-b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.560256] env[68437]: WARNING nova.compute.manager [req-680c58b4-bb9e-4c40-a8f9-965375da7514 req-2aa858ca-8e68-48d7-be9e-e8e98fcdf8ee service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Received unexpected event network-vif-plugged-b8341297-717e-4e99-89cc-784162d9ffb3 for instance with vm_state building and task_state spawning. [ 716.739556] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 716.764959] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 716.768889] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 716.769169] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.769340] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 716.769566] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.769725] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 716.769873] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 716.770093] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 716.770259] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 716.770435] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb 
tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 716.770609] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 716.770793] env[68437]: DEBUG nova.virt.hardware [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 716.772197] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6abe173-c200-487a-9371-223f602aec51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.780730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bad022-4a74-464f-9fc7-f38baa717267 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.797111] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.803915] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Creating folder: Project (0440009c3aac4901b2c88adce2c33341). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.806599] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed55e5ae-5109-46e7-adaf-a91a071e43c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.817616] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542142} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.817978] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/cf691a81-60e3-40ed-ba80-8f481ff2554b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.818462] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.819703] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9acad672-e11c-4ef9-aa34-b3f9010fce26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.822750] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Created folder: Project (0440009c3aac4901b2c88adce2c33341) in parent group-v590848. [ 716.822979] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Creating folder: Instances. Parent ref: group-v590902. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.823316] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa1bfbff-ce5e-4e80-b548-5aa189da4067 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.829236] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 716.829236] env[68437]: value = "task-2943748" [ 716.829236] env[68437]: _type = "Task" [ 716.829236] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.834285] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Created folder: Instances in parent group-v590902. [ 716.834526] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.835121] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.835514] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a3269c5-5539-4eae-a5ae-dcdff73e302e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.853790] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 716.857379] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.863537] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.863537] env[68437]: value = "task-2943750" [ 716.863537] env[68437]: _type = "Task" [ 716.863537] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.874591] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943750, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.903041] env[68437]: DEBUG nova.compute.manager [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Received event network-changed-020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 716.905301] env[68437]: DEBUG nova.compute.manager [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Refreshing instance network info cache due to event network-changed-020f4f15-f02d-4a17-a872-71d79b1ea226. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 716.905301] env[68437]: DEBUG oslo_concurrency.lockutils [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] Acquiring lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.905301] env[68437]: DEBUG oslo_concurrency.lockutils [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] Acquired lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.905301] env[68437]: DEBUG nova.network.neutron [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Refreshing network info cache for port 020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 716.948861] env[68437]: DEBUG nova.scheduler.client.report [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 717.008814] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943746, 'name': CreateVM_Task, 'duration_secs': 0.431166} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.009047] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 717.009590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.009749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.010084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 717.010345] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-069e889e-af8b-41ce-b3f9-396931bde6f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.015320] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 717.015320] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525012ec-1ce6-d350-6c45-2639f193f458" [ 717.015320] env[68437]: _type = "Task" [ 717.015320] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.027160] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525012ec-1ce6-d350-6c45-2639f193f458, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.074099] env[68437]: DEBUG nova.network.neutron [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Updating instance_info_cache with network_info: [{"id": "b8341297-717e-4e99-89cc-784162d9ffb3", "address": "fa:16:3e:71:46:15", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8341297-71", "ovs_interfaceid": "b8341297-717e-4e99-89cc-784162d9ffb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.344370] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07012} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.344370] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.344370] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12615ee4-a3db-4355-b99c-17d339ec0b13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.371715] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/cf691a81-60e3-40ed-ba80-8f481ff2554b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.374510] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afe49699-89c6-4089-9dc2-d2844d43ab4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.400604] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943750, 'name': CreateVM_Task, 'duration_secs': 0.405877} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.402558] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 717.403113] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 717.403113] env[68437]: value = "task-2943751" [ 717.403113] env[68437]: _type = "Task" [ 717.403113] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.404704] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.405014] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.416265] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943751, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.455966] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.741s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.458020] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.461271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.473s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.526349] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525012ec-1ce6-d350-6c45-2639f193f458, 'name': SearchDatastore_Task, 'duration_secs': 0.00989} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.526935] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.527217] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.527512] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.527674] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.527865] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.528188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.528502] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 717.528738] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dd0a251-4922-460a-aca9-8fc8c84efb4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.531081] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d874a25-e629-4189-97fc-feed91ce23ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.536766] env[68437]: DEBUG oslo_vmware.api 
[None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 717.536766] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520bd5fb-7c3a-b6e0-de0e-df21cea0bd2a" [ 717.536766] env[68437]: _type = "Task" [ 717.536766] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.541265] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.541442] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.542483] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca97e6b-89c9-4087-8866-c1748c168cc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.548137] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520bd5fb-7c3a-b6e0-de0e-df21cea0bd2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.550609] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 717.550609] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52589ef7-8511-9f0d-b838-108bd2095d74" [ 717.550609] env[68437]: _type = "Task" [ 717.550609] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.558363] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52589ef7-8511-9f0d-b838-108bd2095d74, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.577248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.577832] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Instance network_info: |[{"id": "b8341297-717e-4e99-89cc-784162d9ffb3", "address": "fa:16:3e:71:46:15", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8341297-71", "ovs_interfaceid": "b8341297-717e-4e99-89cc-784162d9ffb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 717.578357] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:46:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8341297-717e-4e99-89cc-784162d9ffb3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.586593] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.586828] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 717.587410] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53895f7a-ee83-4d41-8118-4e15a4f22d13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.612674] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.612674] env[68437]: value = "task-2943752" [ 717.612674] env[68437]: _type = "Task" [ 717.612674] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.624100] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943752, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.781823] env[68437]: DEBUG nova.network.neutron [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Updated VIF entry in instance network info cache for port 020f4f15-f02d-4a17-a872-71d79b1ea226. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 717.782427] env[68437]: DEBUG nova.network.neutron [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Updating instance_info_cache with network_info: [{"id": "020f4f15-f02d-4a17-a872-71d79b1ea226", "address": "fa:16:3e:7c:95:78", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap020f4f15-f0", "ovs_interfaceid": "020f4f15-f02d-4a17-a872-71d79b1ea226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.917976] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943751, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.970272] env[68437]: DEBUG nova.compute.utils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 717.973588] env[68437]: INFO nova.compute.claims [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.978477] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 717.979343] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 718.029258] env[68437]: DEBUG nova.policy [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42280bc8e492437aa17259ace66e1601', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18b5eecfb2734eaf8288932f146e3d5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.049860] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520bd5fb-7c3a-b6e0-de0e-df21cea0bd2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011377} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.049860] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.049860] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.049860] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.059930] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52589ef7-8511-9f0d-b838-108bd2095d74, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.060700] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5281bb40-6bda-4b1c-9f54-ddb52f12e292 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.066102] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 718.066102] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd32bf-f3ca-6424-472f-cfe49e1044ad" [ 718.066102] env[68437]: _type = "Task" [ 718.066102] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.074601] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd32bf-f3ca-6424-472f-cfe49e1044ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.122321] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943752, 'name': CreateVM_Task, 'duration_secs': 0.435871} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.122528] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 718.123287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.123473] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.123793] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 718.124071] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f8f1b7f-f0e2-4aec-8cd2-b0666961e820 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.128914] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 718.128914] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f733cb-2234-9d1b-a45c-1842fee54e59" [ 718.128914] env[68437]: _type = "Task" [ 718.128914] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.137944] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f733cb-2234-9d1b-a45c-1842fee54e59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.285042] env[68437]: DEBUG oslo_concurrency.lockutils [req-09dd159f-345e-4dbd-af65-89dc646e4343 req-316cc776-f022-4ead-a18c-440de400f08c service nova] Releasing lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.419090] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943751, 'name': ReconfigVM_Task, 'duration_secs': 0.520776} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.421093] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Reconfigured VM instance instance-00000012 to attach disk [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/cf691a81-60e3-40ed-ba80-8f481ff2554b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.421093] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07a19496-70c8-42fe-bdd9-cbab8f67068c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.431066] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 718.431066] env[68437]: value = "task-2943753" [ 718.431066] env[68437]: _type = "Task" [ 718.431066] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.440575] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943753, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.478586] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 718.488816] env[68437]: INFO nova.compute.resource_tracker [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating resource usage from migration 57158733-20f2-4deb-8251-47df03d5e04b [ 718.505115] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Successfully created port: 29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.588287] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd32bf-f3ca-6424-472f-cfe49e1044ad, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.588287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.588287] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 718.588287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.588539] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.588539] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1683199e-1178-4091-8231-fe58c4319d23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.590600] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ae3cdf2-fdea-4014-a0e2-173788fe65df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.601837] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 718.601837] env[68437]: value = "task-2943754" [ 718.601837] env[68437]: _type = "Task" [ 718.601837] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.608056] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.609392] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.609953] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f974553-b059-4184-ae32-00ec134a1542 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.625025] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.625025] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 718.625025] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b68984-e19a-c51a-3d64-5166d46f26fb" [ 718.625025] env[68437]: _type = "Task" [ 718.625025] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.639807] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b68984-e19a-c51a-3d64-5166d46f26fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.641142] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31259df1-791a-46df-bb04-cf7a1a58934e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.649411] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f733cb-2234-9d1b-a45c-1842fee54e59, 'name': SearchDatastore_Task, 'duration_secs': 0.016316} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.654733] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.654733] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.654733] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.655073] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.655073] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.661255] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-813fd7c3-188a-4cf4-8537-9d2b1142090b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.663670] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 718.663670] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f9a1dc-6d85-2ace-1b6b-102b6cf9260d" [ 718.663670] env[68437]: _type = "Task" [ 718.663670] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.673685] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f9a1dc-6d85-2ace-1b6b-102b6cf9260d, 'name': SearchDatastore_Task, 'duration_secs': 0.009195} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.674980] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.675952] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 718.676298] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.676441] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.679929] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a8df761-edaf-42fe-9ef4-6a69fbc5abdc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.682035] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-310f4547-76c0-4628-be71-4f5d136f1cb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.690291] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 718.690291] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52dc35c5-49cb-47c1-f604-aa7606d3da2d" [ 718.690291] env[68437]: _type = "Task" [ 718.690291] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.690569] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 718.690569] env[68437]: value = "task-2943755" [ 718.690569] env[68437]: _type = "Task" [ 718.690569] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.706074] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52dc35c5-49cb-47c1-f604-aa7606d3da2d, 'name': SearchDatastore_Task, 'duration_secs': 0.01102} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.710645] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.711401] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-737f83d1-3b8a-4414-85a1-537e319deb1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.717816] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 718.717816] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5216ef87-cfd2-1f40-7df7-4653d19df8b8" [ 718.717816] env[68437]: _type = "Task" [ 718.717816] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.730584] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5216ef87-cfd2-1f40-7df7-4653d19df8b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.945818] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943753, 'name': Rename_Task, 'duration_secs': 0.272644} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.945818] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.945818] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-981c4315-95f9-44ef-9781-cc01ec7ff7df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.957463] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 718.957463] env[68437]: value = "task-2943756" [ 718.957463] env[68437]: _type = "Task" [ 718.957463] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.973104] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.120427] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bcd85a-03af-4afd-82b9-1b422c96db51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.132020] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943754, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476147} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.133204] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.133629] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.134042] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea7a13b2-fc9d-4831-9131-8a3a12c306e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.147700] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 719.147700] env[68437]: value = "task-2943757" [ 719.147700] env[68437]: _type = "Task" [ 719.147700] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.148976] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26933e3c-1f77-45da-89ab-d1e79c15156e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.325345] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7f273c-bade-4e52-b097-5626d5201089 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.325345] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.325345] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943755, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.325345] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28281b44-b77a-49e0-a6cf-04e353772ab0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.325345] env[68437]: DEBUG nova.compute.provider_tree [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.327495] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5216ef87-cfd2-1f40-7df7-4653d19df8b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012008} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.327495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.327495] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] d5db3112-88c7-43af-a434-b91ca69f8559/d5db3112-88c7-43af-a434-b91ca69f8559.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 719.327495] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e64e80f-3377-4aa1-a2d8-4e0bb65c56da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.327495] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 719.327495] env[68437]: value = "task-2943758" [ 719.327495] env[68437]: _type = "Task" [ 719.330855] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.330855] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943758, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.330855] env[68437]: DEBUG nova.compute.manager [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Received event network-changed-b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 719.330855] env[68437]: DEBUG nova.compute.manager [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Refreshing instance network info cache due to event network-changed-b8341297-717e-4e99-89cc-784162d9ffb3. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 719.330855] env[68437]: DEBUG oslo_concurrency.lockutils [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] Acquiring lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.330855] env[68437]: DEBUG oslo_concurrency.lockutils [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] Acquired lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.331824] env[68437]: DEBUG nova.network.neutron [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Refreshing network info cache for port b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 719.478060] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943756, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.495923] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.551283] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.552188] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.552188] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.552188] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.552188] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.554784] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.556801] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.557157] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.557418] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.557664] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.558092] env[68437]: DEBUG nova.virt.hardware [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.562377] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfab4d21-944d-4ece-9c30-be573810bddc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.575759] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adbf242-0ddf-4530-b31b-ac5a0b425264 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.663876] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10691} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.664693] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.667099] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3874976b-ab1f-4e6e-95f6-d96681f065be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.699021] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.699021] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-679192d3-572b-4076-8305-92661987c4ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.725881] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712262} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.727560] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.727794] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.728394] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 719.728394] env[68437]: value = "task-2943759" [ 719.728394] env[68437]: _type = "Task" [ 719.728394] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.728755] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-416d9212-b81a-4ca4-b5f2-3e175699cc11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.740544] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943759, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.742441] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 719.742441] env[68437]: value = "task-2943760" [ 719.742441] env[68437]: _type = "Task" [ 719.742441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.756200] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.760212] env[68437]: DEBUG nova.scheduler.client.report [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.774980] env[68437]: DEBUG nova.compute.manager [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Received event network-changed-77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 719.775218] env[68437]: DEBUG nova.compute.manager [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Refreshing instance network info cache due to event network-changed-77d0b04c-ecff-4b2e-a001-7248da043b47. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 719.775463] env[68437]: DEBUG oslo_concurrency.lockutils [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] Acquiring lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.775688] env[68437]: DEBUG oslo_concurrency.lockutils [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] Acquired lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.775850] env[68437]: DEBUG nova.network.neutron [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Refreshing network info cache for port 77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 719.786017] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943758, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.977035] env[68437]: DEBUG oslo_vmware.api [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943756, 'name': PowerOnVM_Task, 'duration_secs': 0.620835} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.977035] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.977035] env[68437]: INFO nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Took 8.46 seconds to spawn the instance on the hypervisor. [ 719.977035] env[68437]: DEBUG nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.977035] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cd5b9e-a1e4-43d7-ac21-599ecad72f3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.132187] env[68437]: DEBUG nova.network.neutron [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Updated VIF entry in instance network info cache for port b8341297-717e-4e99-89cc-784162d9ffb3. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 720.135889] env[68437]: DEBUG nova.network.neutron [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Updating instance_info_cache with network_info: [{"id": "b8341297-717e-4e99-89cc-784162d9ffb3", "address": "fa:16:3e:71:46:15", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8341297-71", "ovs_interfaceid": "b8341297-717e-4e99-89cc-784162d9ffb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.243294] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943759, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.253096] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107087} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.253574] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.254845] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5056f61d-d36b-458f-b07a-ff6f8773005a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.276056] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.276943] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.818s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.277127] env[68437]: INFO nova.compute.manager [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Migrating [ 720.277367] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.277532] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.278907] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b56581c-7572-47d2-8984-0eb055c4cfef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.294551] env[68437]: INFO nova.compute.rpcapi [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 720.295314] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.301480] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.867s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.303110] env[68437]: INFO nova.compute.claims [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.324246] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 720.324246] env[68437]: value = "task-2943761" [ 720.324246] env[68437]: _type = "Task" [ 720.324246] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.331880] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596196} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.332632] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] d5db3112-88c7-43af-a434-b91ca69f8559/d5db3112-88c7-43af-a434-b91ca69f8559.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 720.332895] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 720.333611] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99e38511-ac67-4b1a-8558-94db765141dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.340176] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.345315] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 720.345315] env[68437]: value = "task-2943762" [ 720.345315] env[68437]: _type = "Task" [ 720.345315] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.355592] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943762, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.496884] env[68437]: INFO nova.compute.manager [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Took 26.85 seconds to build instance. [ 720.582923] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Successfully updated port: 29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 720.640285] env[68437]: DEBUG oslo_concurrency.lockutils [req-c0882eb2-6146-41c9-be96-901c6173186d req-906d722c-e2ed-41ba-a585-24b198993eff service nova] Releasing lock "refresh_cache-d5db3112-88c7-43af-a434-b91ca69f8559" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.641848] env[68437]: DEBUG nova.network.neutron [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updated VIF entry in instance network info cache for port 77d0b04c-ecff-4b2e-a001-7248da043b47. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 720.642261] env[68437]: DEBUG nova.network.neutron [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating instance_info_cache with network_info: [{"id": "77d0b04c-ecff-4b2e-a001-7248da043b47", "address": "fa:16:3e:2e:b7:05", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77d0b04c-ec", "ovs_interfaceid": "77d0b04c-ecff-4b2e-a001-7248da043b47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.742944] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943759, 'name': ReconfigVM_Task, 'duration_secs': 0.550618} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.743358] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Reconfigured VM instance instance-00000010 to attach disk [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4/f517b14c-320f-4a6e-ae74-f2335e22f7a4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.744201] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-767043d4-612b-4105-a8fd-94ec3f7d23f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.751537] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 720.751537] env[68437]: value = "task-2943763" [ 720.751537] env[68437]: _type = "Task" [ 720.751537] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.760787] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943763, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.826165] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.826165] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.826345] env[68437]: DEBUG nova.network.neutron [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 720.839501] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943761, 'name': ReconfigVM_Task, 'duration_secs': 0.482881} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.839889] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.841416] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c16ac8c-3c94-45d8-879d-9f933addd2ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.852578] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 720.852578] env[68437]: value = "task-2943764" [ 720.852578] env[68437]: _type = "Task" [ 720.852578] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.861438] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943762, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111376} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.862623] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.863653] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850ce504-baf5-46df-b3e7-76148d41d55a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.870711] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943764, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.894698] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] d5db3112-88c7-43af-a434-b91ca69f8559/d5db3112-88c7-43af-a434-b91ca69f8559.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.895054] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d98616e3-f8ed-4e96-8e3e-3cf7a24dd230 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.919876] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 720.919876] env[68437]: value = "task-2943765" [ 720.919876] env[68437]: _type = "Task" [ 720.919876] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.932206] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943765, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.002347] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d70abbd-b12b-416d-b44f-0ba2bf2608f9 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.248s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.086516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.090718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.090718] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 721.145310] env[68437]: DEBUG oslo_concurrency.lockutils [req-35c24391-b94c-4071-ab9f-06d924835804 req-07a901fa-8e0c-4a80-a6f9-7be62082ffc1 service nova] Releasing lock "refresh_cache-cf394b0b-cb14-4ae1-81bb-622c951bfdab" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.262903] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943763, 'name': Rename_Task, 'duration_secs': 0.221305} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.263210] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.264949] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee43d655-1bc9-4f97-9bdf-16bfea14d74f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.273936] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 721.273936] env[68437]: value = "task-2943766" [ 721.273936] env[68437]: _type = "Task" [ 721.273936] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.284691] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.379386] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943764, 'name': Rename_Task, 'duration_secs': 0.193439} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.380748] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.381196] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b011caeb-b42e-4d7f-a19f-804aaa7c3e9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.390378] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 721.390378] env[68437]: value = "task-2943767" [ 721.390378] env[68437]: _type = "Task" [ 721.390378] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.401029] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.436946] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943765, 'name': ReconfigVM_Task, 'duration_secs': 0.442779} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.436946] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Reconfigured VM instance instance-00000013 to attach disk [datastore2] d5db3112-88c7-43af-a434-b91ca69f8559/d5db3112-88c7-43af-a434-b91ca69f8559.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 721.438606] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a12a2623-b55e-491a-8d23-75a9de610f81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.443962] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 721.443962] env[68437]: value = "task-2943768" [ 721.443962] env[68437]: _type = "Task" [ 721.443962] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.456798] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943768, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.504581] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 721.550881] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "efed858a-44b9-45b7-8778-22183549088c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.550881] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.637554] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 721.639007] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415da2f0-6276-47a4-b68f-0610a0f9f9f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.648700] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 721.648700] env[68437]: ERROR oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk due to incomplete transfer. [ 721.649423] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 721.651310] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bdb56548-15a8-4b1d-8334-003f95162719 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.659691] env[68437]: DEBUG oslo_vmware.rw_handles [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd49c-0374-238c-fd9f-a83fc2dfdd8f/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 721.659691] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Uploaded image d893745e-cd23-4869-b546-69f9b690cc74 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 721.660875] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 721.663745] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-75941f4b-72e1-459a-b5f6-9a7c744926da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.673942] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 721.673942] env[68437]: value = "task-2943769" [ 721.673942] env[68437]: _type = "Task" [ 721.673942] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.682269] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943769, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.752375] env[68437]: DEBUG nova.network.neutron [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.784535] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943766, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.877404] env[68437]: DEBUG nova.network.neutron [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Updating instance_info_cache with network_info: [{"id": "29fb0e88-6864-4d4b-b480-2f08c9984421", "address": "fa:16:3e:8e:65:a1", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fb0e88-68", "ovs_interfaceid": "29fb0e88-6864-4d4b-b480-2f08c9984421", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.907542] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.929945] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e44fba-207f-44f5-9480-9b302ce0f0dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.938679] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1344838-0c7f-4e03-bbf8-27e31a3581e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.978625] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb2e91f-0ed1-4fec-b95b-4c3755e903ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.979332] env[68437]: DEBUG nova.compute.manager [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Received event network-vif-plugged-29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 721.979525] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Acquiring lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.979809] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.980131] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.980361] env[68437]: DEBUG nova.compute.manager [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] No waiting events found dispatching network-vif-plugged-29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.980524] env[68437]: WARNING nova.compute.manager [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Received unexpected event network-vif-plugged-29fb0e88-6864-4d4b-b480-2f08c9984421 for instance with vm_state building and task_state spawning. 
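The req-1030a2da records above trace an external event path: Neutron reports network-vif-plugged-29fb0e88-6864-4d4b-b480-2f08c9984421, the compute manager takes the per-instance "d7c64aa1-...-events" lock to pop a matching waiter, finds none because the instance is still building, and logs the event as unexpected. The following is only a minimal sketch of that pop-under-lock pattern, assuming a plain dict registry and threading.Event waiters; it is not Nova's actual InstanceEvents implementation, and only the oslo_concurrency.lockutils calls are real API.

import logging
import threading

from oslo_concurrency import lockutils

LOG = logging.getLogger(__name__)

# Hypothetical registry: instance uuid -> {event name: threading.Event}
_pending_events = {}


def prepare_for_instance_event(instance_uuid, event_name):
    # Register a waiter before starting the operation that will trigger the
    # event (e.g. plugging a VIF), so a later notification has something to wake.
    waiter = threading.Event()
    with lockutils.lock('%s-events' % instance_uuid):
        _pending_events.setdefault(instance_uuid, {})[event_name] = waiter
    return waiter


def pop_instance_event(instance_uuid, event_name):
    # Mirrors the Acquiring/Acquired/"released" lock records above: the pop
    # happens entirely under the "<uuid>-events" lock.
    with lockutils.lock('%s-events' % instance_uuid):
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to "No waiting events found dispatching ..." followed by
        # the WARNING about an unexpected event.
        LOG.warning('Received unexpected event %s for instance %s',
                    event_name, instance_uuid)
        return
    waiter.set()

When the event is expected, prepare_for_instance_event would have been called first and handle_external_event simply wakes the waiter instead of warning.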
[ 721.980678] env[68437]: DEBUG nova.compute.manager [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Received event network-changed-29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 721.980826] env[68437]: DEBUG nova.compute.manager [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Refreshing instance network info cache due to event network-changed-29fb0e88-6864-4d4b-b480-2f08c9984421. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 721.981144] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Acquiring lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.985960] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943768, 'name': Rename_Task, 'duration_secs': 0.176582} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.986565] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.986830] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b5ab933-5241-40c0-abe9-f02157d64c3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.991785] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c685748-d935-49ac-8ef3-d83ff15f84e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.997058] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 721.997058] env[68437]: value = "task-2943770" [ 721.997058] env[68437]: _type = "Task" [ 721.997058] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.010268] env[68437]: DEBUG nova.compute.provider_tree [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.018766] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943770, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.032629] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.184655] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943769, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.258410] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.287823] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943766, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.383021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.383021] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Instance network_info: |[{"id": "29fb0e88-6864-4d4b-b480-2f08c9984421", "address": "fa:16:3e:8e:65:a1", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fb0e88-68", "ovs_interfaceid": "29fb0e88-6864-4d4b-b480-2f08c9984421", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 722.383368] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Acquired lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.383368] env[68437]: DEBUG nova.network.neutron [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Refreshing network info cache for port 29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 722.383368] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:65:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29fb0e88-6864-4d4b-b480-2f08c9984421', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.394162] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Creating folder: Project (18b5eecfb2734eaf8288932f146e3d5e). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.396306] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f42f69b-4e62-4338-b164-c072fb72967d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.409401] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.411100] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Created folder: Project (18b5eecfb2734eaf8288932f146e3d5e) in parent group-v590848. [ 722.411951] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Creating folder: Instances. Parent ref: group-v590906. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.411951] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b95ca5f3-25f6-4741-a148-69984f4aed63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.420291] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Created folder: Instances in parent group-v590906. [ 722.420622] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 722.421027] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 722.421133] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22345b6a-d077-4781-b692-8793d8e74544 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.442456] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.442456] env[68437]: value = "task-2943773" [ 722.442456] env[68437]: _type = "Task" [ 722.442456] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.463017] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943773, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.509907] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943770, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.521573] env[68437]: DEBUG nova.scheduler.client.report [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.690328] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943769, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.786936] env[68437]: DEBUG oslo_vmware.api [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943766, 'name': PowerOnVM_Task, 'duration_secs': 1.102408} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.786936] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 722.786936] env[68437]: DEBUG nova.compute.manager [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 722.787843] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047278af-562d-47c5-a3f0-d4096083ca3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.878963] env[68437]: INFO nova.compute.manager [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Rescuing [ 722.879467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.879685] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.879951] env[68437]: DEBUG nova.network.neutron [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 722.910140] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.955332] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943773, 'name': CreateVM_Task, 'duration_secs': 0.365157} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.955548] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.956404] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.957158] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.957231] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 722.957534] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24a93983-73a6-44d9-8022-d0cb016da920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.965035] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 722.965035] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a042a0-3f5e-b9f6-f50e-77146659fad4" [ 722.965035] env[68437]: _type = "Task" [ 722.965035] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.974997] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a042a0-3f5e-b9f6-f50e-77146659fad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.014041] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943770, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.029192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.033022] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 723.034292] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.839s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.035974] env[68437]: INFO nova.compute.claims [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.188684] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943769, 'name': Destroy_Task, 'duration_secs': 1.307128} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.189649] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Destroyed the VM [ 723.190636] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 723.194309] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-173a5e69-0e94-47e5-9b66-22d0b5f4a27d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.204131] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 723.204131] env[68437]: value = "task-2943774" [ 723.204131] env[68437]: _type = "Task" [ 723.204131] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.213251] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943774, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.310275] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.313561] env[68437]: DEBUG nova.network.neutron [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Updated VIF entry in instance network info cache for port 29fb0e88-6864-4d4b-b480-2f08c9984421. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 723.313868] env[68437]: DEBUG nova.network.neutron [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Updating instance_info_cache with network_info: [{"id": "29fb0e88-6864-4d4b-b480-2f08c9984421", "address": "fa:16:3e:8e:65:a1", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29fb0e88-68", "ovs_interfaceid": "29fb0e88-6864-4d4b-b480-2f08c9984421", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.409590] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.480488] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a042a0-3f5e-b9f6-f50e-77146659fad4, 'name': SearchDatastore_Task, 'duration_secs': 0.010537} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.481687] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.481988] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.482283] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.482505] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.482751] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.483104] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60e18d37-1d70-4d9c-9c3f-7b651ff1921e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.492147] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.492363] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.493138] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a82286e6-cf40-4bb2-a1ea-bde783d2677b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.498480] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 723.498480] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529e5291-326b-4911-0971-b34072d7beb0" [ 723.498480] env[68437]: _type = "Task" [ 723.498480] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.513667] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529e5291-326b-4911-0971-b34072d7beb0, 'name': SearchDatastore_Task, 'duration_secs': 0.0088} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.513667] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249ab67d-050f-4e88-b2e1-daa94345ddb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.521134] env[68437]: DEBUG oslo_vmware.api [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943770, 'name': PowerOnVM_Task, 'duration_secs': 1.075135} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.521755] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.521972] env[68437]: INFO nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Took 9.46 seconds to spawn the instance on the hypervisor. 
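Most of the Task records in this stretch (Rename_Task, PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task) follow the same oslo.vmware pattern: invoke_api() returns a task managed-object reference immediately, and wait_for_task() polls it, which is what produces the "_poll_task ... progress is N%" lines until the task completes or faults. A rough sketch of the power-on case; the vCenter endpoint, credentials and VM reference are placeholder assumptions, not values from this log.

from oslo_vmware import api as vmware_api


def power_on_vm(session, vm_ref):
    # invoke_api returns the PowerOnVM_Task managed-object reference right
    # away; wait_for_task polls it (the "progress is N%" records) and raises
    # an oslo_vmware exception if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)


def make_session():
    # Placeholder endpoint and credentials; constructing the session performs
    # the SOAP login, after which invoke_api/wait_for_task can be used.
    return vmware_api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'password',
        api_retry_count=2, task_poll_interval=0.5)

The same invoke-then-wait pair sits behind every VirtualMachine and VirtualDiskManager operation in this log, which is why each "Invoking ..._Task" record is followed by a run of progress polls.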
[ 723.522258] env[68437]: DEBUG nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.523054] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ded897f-e2ef-4c2a-9d97-9a8f8f470f02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.526920] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 723.526920] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524f2e63-692e-cec2-410c-b07455262cb5" [ 723.526920] env[68437]: _type = "Task" [ 723.526920] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.535933] env[68437]: DEBUG nova.compute.utils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 723.544167] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 723.544379] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 723.548224] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524f2e63-692e-cec2-410c-b07455262cb5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.599158] env[68437]: DEBUG nova.policy [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4a443d294814887be34ea3bdccf54f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a8ec160fb3148c6aa238e70a975496f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 723.713974] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943774, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.785112] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742a1b49-ddcb-4a4f-99a0-e563d3e2e916 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.810286] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 723.816200] env[68437]: DEBUG oslo_concurrency.lockutils [req-1030a2da-fea0-4b74-b899-ac3681b31a43 req-5010e2a6-9428-4ecd-a01f-96e4e5b994d1 service nova] Releasing lock "refresh_cache-d7c64aa1-44f8-44f4-9fb6-463033837736" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.891742] env[68437]: DEBUG nova.network.neutron [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Updating instance_info_cache with network_info: [{"id": "020f4f15-f02d-4a17-a872-71d79b1ea226", "address": "fa:16:3e:7c:95:78", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap020f4f15-f0", "ovs_interfaceid": "020f4f15-f02d-4a17-a872-71d79b1ea226", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.912486] env[68437]: DEBUG oslo_vmware.api [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943767, 'name': PowerOnVM_Task, 'duration_secs': 2.365757} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.912817] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.913037] env[68437]: INFO nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Took 7.17 seconds to spawn the instance on the hypervisor. [ 723.913260] env[68437]: DEBUG nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.915892] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5c5fbf-822c-44c0-a2a5-9242ab17be1a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.961361] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Successfully created port: 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 724.041466] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524f2e63-692e-cec2-410c-b07455262cb5, 'name': SearchDatastore_Task, 'duration_secs': 0.013572} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.041897] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.042211] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] d7c64aa1-44f8-44f4-9fb6-463033837736/d7c64aa1-44f8-44f4-9fb6-463033837736.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.046754] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ffcb4d0-20ad-477e-bdd1-57cd893e6304 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.052127] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 724.057208] env[68437]: INFO nova.compute.manager [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Took 30.16 seconds to build instance. [ 724.064522] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 724.064522] env[68437]: value = "task-2943775" [ 724.064522] env[68437]: _type = "Task" [ 724.064522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.074878] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943775, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.219903] env[68437]: DEBUG oslo_vmware.api [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943774, 'name': RemoveSnapshot_Task, 'duration_secs': 0.802798} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.220489] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 724.222511] env[68437]: INFO nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Took 15.43 seconds to snapshot the instance on the hypervisor. [ 724.319268] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 724.320209] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6599caa-6e6f-4238-978c-4a8d2bdf6bf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.329857] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 724.329857] env[68437]: value = "task-2943776" [ 724.329857] env[68437]: _type = "Task" [ 724.329857] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.348703] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.394618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-cf691a81-60e3-40ed-ba80-8f481ff2554b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.437861] env[68437]: INFO nova.compute.manager [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Took 26.25 seconds to build instance. 
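Nearly every vCenter operation in this section follows the same submit-and-poll shape: a task such as RemoveSnapshot_Task, PowerOffVM_Task or CopyVirtualDisk_Task is created, then polled while the log reports "progress is N%" until the poll returns "completed successfully". Below is a minimal sketch of that poll loop in plain Python; get_task_info() is a hypothetical callable standing in for the SOAP property read the real driver performs, not an actual oslo.vmware or pyVmomi API.

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
    # get_task_info(task_id) is assumed to return a dict of the form
    # {'state': 'running' | 'success' | 'error', 'progress': 0..100}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            return info                                   # "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
        print('Task %s progress is %s%%' % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)
    raise TimeoutError('task %s did not complete within %.0fs' % (task_id, timeout))

The real session layers retries, re-authentication and fault translation on top of this loop; the sketch only mirrors the submit/poll/report cycle visible in the surrounding records.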
[ 724.564316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2b02316f-4189-43c8-b3c5-b2dddaf77695 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.479s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.582270] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943775, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.583343] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] d7c64aa1-44f8-44f4-9fb6-463033837736/d7c64aa1-44f8-44f4-9fb6-463033837736.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.583609] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.583875] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-799d708c-9050-4742-8703-9608c373757d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.594040] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 724.594040] env[68437]: value = "task-2943777" [ 724.594040] env[68437]: _type = "Task" [ 724.594040] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.603427] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943777, 'name': ExtendVirtualDisk_Task} progress is 0%. 
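Almost every step here is bracketed by oslo_concurrency.lockutils records of the form acquiring / acquired (waited N s) / released (held N s), keyed by names such as "compute_resources", per-instance UUIDs and "refresh_cache-<uuid>". The following is a toy context manager that reproduces only that bookkeeping; it is not oslo.concurrency itself, and the class name and messages are illustrative.

import threading
import time

_locks = {}

class timed_lock:
    """Toy named-lock context manager that logs waited/held times."""
    def __init__(self, name, owner):
        self.name = name
        self.owner = owner
        self.lock = _locks.setdefault(name, threading.Lock())

    def __enter__(self):
        start = time.monotonic()
        self.lock.acquire()
        self.held_since = time.monotonic()
        print('Lock "%s" acquired by "%s" :: waited %.3fs'
              % (self.name, self.owner, self.held_since - start))
        return self

    def __exit__(self, *exc):
        self.lock.release()
        print('Lock "%s" released by "%s" :: held %.3fs'
              % (self.name, self.owner, time.monotonic() - self.held_since))

# Usage sketch:
# with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
#     ...  # claim resources for an instance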
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.711944] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a0795c-f7bc-4c53-887b-6e73de49047e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.720688] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff0a835-113b-41d4-abb5-ff1691c2b677 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.725562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.725847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.726067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.726253] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.726416] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.731819] env[68437]: INFO nova.compute.manager [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Terminating instance [ 724.735646] env[68437]: DEBUG nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance disappeared during snapshot {{(pid=68437) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 724.741270] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.741369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.741566] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.741982] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.741982] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.777446] env[68437]: DEBUG nova.compute.manager [None req-40019e3a-0a08-463a-a6d7-1875d00414c7 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image not found during clean up d893745e-cd23-4869-b546-69f9b690cc74 {{(pid=68437) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 724.779141] env[68437]: DEBUG nova.compute.manager [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 724.779322] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 724.780156] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c694c21-d44d-4dbc-9183-c8837febd19e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.784581] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122b8f04-4178-44a9-acbb-b8f0a13a0a00 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.804802] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 724.806962] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bead79-d476-42bf-973b-9ff831f60aba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.811628] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f12c5036-d2bd-4cb3-9047-8a2055d2e369 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.825368] env[68437]: DEBUG nova.compute.provider_tree [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.828992] env[68437]: INFO nova.compute.manager [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Terminating instance [ 724.843760] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943776, 'name': PowerOffVM_Task, 'duration_secs': 0.319541} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.844144] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 724.844377] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 724.890827] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 724.891064] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 724.891498] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore2] 5abc2c5a-2177-4d77-97ce-872808bb47ee {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 724.891756] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7434d37-50db-407d-927b-cab4ca25b067 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.899781] env[68437]: DEBUG oslo_vmware.api [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 724.899781] env[68437]: value = "task-2943779" [ 724.899781] env[68437]: _type = "Task" [ 724.899781] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.908772] env[68437]: DEBUG oslo_vmware.api [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943779, 'name': DeleteDatastoreFile_Task} progress is 0%. 
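The records for instance 5abc2c5a-2177-4d77-97ce-872808bb47ee show the hypervisor-side teardown proceeding in a fixed order: unregister the VM from vCenter, delete its folder from the datastore, then hand the instance over to network deallocation (picked up a few records below). For the other instance being deleted in parallel, 0649ee2f-cd90-4597-b7c4-09f2acaf3f54, a PowerOffVM_Task appears first. A compressed sketch of that ordering, with hypothetical callables (unregister_vm, delete_datastore_dir, deallocate_network) standing in for the driver and Neutron calls:

def destroy_instance(unregister_vm, delete_datastore_dir, deallocate_network,
                     vm_ref, datastore_path, instance_uuid):
    # The ordering mirrors the records above and below:
    unregister_vm(vm_ref)                  # "Unregistered the VM"
    delete_datastore_dir(datastore_path)   # DeleteDatastoreFile_Task -> "Deleted the datastore file"
    deallocate_network(instance_uuid)      # "Deallocating network for instance"

The file deletion itself is again a vCenter task (DeleteDatastoreFile_Task) that goes through the same poll loop sketched earlier.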
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.939598] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3e3dc0fe-6be4-442c-9026-916c7148c4eb tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.601s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.067899] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 725.073483] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 725.106474] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.106897] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.107078] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.109099] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.109099] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 
tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.109099] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 725.109099] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.109099] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.109514] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.109514] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.109514] env[68437]: DEBUG nova.virt.hardware [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.109514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63886209-6dff-4a85-8941-f2904cbd2b15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.120195] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.242038} completed successfully. 
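The nova.virt.hardware records just above derive CPU topology candidates for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the 65536 per-dimension maxima apply and exactly one candidate survives. The snippet below is a simplified illustration of how such candidates can be enumerated; it is not the actual nova.virt.hardware code, only a sketch of the factorization it performs.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every factorization sockets * cores * threads == vcpus that respects the
    # per-dimension maxima is a candidate topology.
    found = []
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For one vCPU this yields a single candidate, matching the
# "Got 1 possible topologies" record:
assert possible_topologies(1) == [VirtCPUTopology(sockets=1, cores=1, threads=1)]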
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.124322] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 725.126013] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7b503e-acdf-4e99-97ef-346f66e0f986 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.130842] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992c4bf2-5233-4947-b6d7-ce0a0dc2664e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.169132] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] d7c64aa1-44f8-44f4-9fb6-463033837736/d7c64aa1-44f8-44f4-9fb6-463033837736.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 725.169910] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abeef5e8-a7c2-4c22-b2d4-3ab5b8c9114e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.194251] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 725.194251] env[68437]: value = "task-2943780" [ 725.194251] env[68437]: _type = "Task" [ 725.194251] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.203730] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943780, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.329829] env[68437]: DEBUG nova.scheduler.client.report [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 725.337235] env[68437]: DEBUG nova.compute.manager [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 725.337505] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.338434] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661443e0-9511-4edf-bbc1-3fee7f657ce1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.352551] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.352801] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.354690] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.354952] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.355419] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.356026] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 725.356026] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.356313] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.358018] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.358018] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.358018] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.364262] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.366641] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3226945a-554d-4907-872d-b98b64f1020e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.381216] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ecbdf84-a36e-434c-adea-0c118bf9d355 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.389880] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: 
(returnval){ [ 725.389880] env[68437]: value = "task-2943781" [ 725.389880] env[68437]: _type = "Task" [ 725.389880] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.392391] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 725.392391] env[68437]: value = "task-2943782" [ 725.392391] env[68437]: _type = "Task" [ 725.392391] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.413068] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.419891] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943782, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.423672] env[68437]: DEBUG oslo_vmware.api [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137444} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.424065] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 725.424339] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 725.424861] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 725.424947] env[68437]: INFO nova.compute.manager [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Took 0.65 seconds to destroy the instance on the hypervisor. [ 725.425291] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
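Network deallocation is not called inline: it is wrapped in a retrying helper and driven by a looping call, which is why the record above waits for _deallocate_network_with_retries to return. The sketch below shows a generic retry wrapper of that kind; it assumes nothing about the oslo.service API, and call_with_retries and the deallocation callable in the usage comment are illustrative names only.

import time

def call_with_retries(func, max_attempts=3, delay=1.0, exceptions=(Exception,)):
    # Invoke func(); on failure sleep and try again, giving up after max_attempts.
    for attempt in range(1, max_attempts + 1):
        try:
            return func()
        except exceptions:
            if attempt == max_attempts:
                raise
            time.sleep(delay)

# Usage sketch with a hypothetical deallocation callable:
# call_with_retries(lambda: deallocate_for_instance(context, instance))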
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 725.425586] env[68437]: DEBUG nova.compute.manager [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 725.425751] env[68437]: DEBUG nova.network.neutron [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 725.444665] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 725.529320] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Successfully updated port: 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 725.619176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.710283] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943780, 'name': ReconfigVM_Task, 'duration_secs': 0.401157} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.710504] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Reconfigured VM instance instance-00000015 to attach disk [datastore2] d7c64aa1-44f8-44f4-9fb6-463033837736/d7c64aa1-44f8-44f4-9fb6-463033837736.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.712789] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-608862a6-6fb1-45d3-93a4-b1d031bacc7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.718639] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 725.718639] env[68437]: value = "task-2943783" [ 725.718639] env[68437]: _type = "Task" [ 725.718639] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.729685] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943783, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.837654] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.802s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 725.837654] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 725.839573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.351s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 725.841747] env[68437]: INFO nova.compute.claims [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.909456] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943781, 'name': PowerOffVM_Task, 'duration_secs': 0.251661} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.914123] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 725.914123] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 725.915237] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943782, 'name': ReconfigVM_Task, 'duration_secs': 0.251613} completed successfully. 
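The claim recorded above for instance c74569b8-dfc9-4a74-9d25-74b484bd9477 is checked against the provider inventory reported a little earlier for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05. Per resource class the usable capacity follows the standard Placement arithmetic, effective = (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a small worked example with the numbers from the log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
}

for rc, inv in inventory.items():
    effective = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, effective)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So this node can hand out up to 192 VCPUs at the 4.0 overcommit ratio, while a single instance's disk is still capped at 155 GB by max_unit.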
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.915237] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc36727a-e090-49cd-8f71-7e2806b81ba1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.916791] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 725.955182] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.958119] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-845eb90f-8bc8-418a-8f2b-11d52805aec7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.963881] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 725.963881] env[68437]: value = "task-2943785" [ 725.963881] env[68437]: _type = "Task" [ 725.963881] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.976082] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943785, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.982385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.011526] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 726.011526] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 726.011526] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Deleting the datastore file [datastore1] 0649ee2f-cd90-4597-b7c4-09f2acaf3f54 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 726.011526] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06f78b99-219b-4fb8-b01a-776df8dcad50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.016712] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for the task: (returnval){ [ 726.016712] env[68437]: value = "task-2943786" [ 726.016712] env[68437]: _type = "Task" [ 726.016712] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.028623] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943786, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.033528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.033699] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.033970] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 726.230481] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943783, 'name': Rename_Task, 'duration_secs': 0.174713} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.230865] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.231755] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6ca5e39-985f-42d5-97c4-74414fce8ff6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.241022] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 726.241022] env[68437]: value = "task-2943787" [ 726.241022] env[68437]: _type = "Task" [ 726.241022] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.251508] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943787, 'name': PowerOnVM_Task} progress is 0%. 
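For instance d7c64aa1-44f8-44f4-9fb6-463033837736 the records in this stretch trace the whole root-disk preparation path: the cached image VMDK is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended to 1048576, which reads as KiB, i.e. 1 GiB for the flavor's root_gb=1 (ExtendVirtualDisk_Task), the copy is attached by reconfiguring the VM (ReconfigVM_Task), the VM is renamed (Rename_Task) and finally powered on (PowerOnVM_Task). A compressed sketch of that sequence; the helper callables are hypothetical stand-ins, each of which would be another polled vCenter task in the real driver.

def prepare_and_boot_root_disk(copy_virtual_disk, extend_virtual_disk, attach_disk_to_vm,
                               rename_vm, power_on_vm,
                               cached_vmdk, instance_vmdk, root_size_kb, vm_ref, name):
    # One line per task observed in the log, in the same order:
    copy_virtual_disk(cached_vmdk, instance_vmdk)      # CopyVirtualDisk_Task
    extend_virtual_disk(instance_vmdk, root_size_kb)   # ExtendVirtualDisk_Task
    attach_disk_to_vm(vm_ref, instance_vmdk)           # ReconfigVM_Task
    rename_vm(vm_ref, name)                            # Rename_Task
    power_on_vm(vm_ref)                                # PowerOnVM_Task

The extend step shows up here presumably because the cirros image (about 20 MB) is smaller than the 1 GiB root disk requested by the flavor.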
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.318342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.319064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.349098] env[68437]: DEBUG nova.compute.utils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 726.356891] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 726.357100] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 726.427022] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.427022] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.427022] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
726.428095] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.428538] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.428844] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.431540] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.431540] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.431540] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.431540] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.431540] env[68437]: DEBUG nova.virt.hardware [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.438293] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfiguring VM instance instance-00000008 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 726.440965] env[68437]: DEBUG nova.policy [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '915414e3be8a4311a96513aa2fec7053', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ae9811689c645a7af2096a600ed6e1e', 'project_domain_id': 
'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 726.443865] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26d25426-b014-4521-a72b-ee7be93a8bf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.481459] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943785, 'name': PowerOffVM_Task, 'duration_secs': 0.378834} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.484466] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 726.484591] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 726.484591] env[68437]: value = "task-2943788" [ 726.484591] env[68437]: _type = "Task" [ 726.484591] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.485589] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dcf098a-a616-4377-a2e4-95838e8adc0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.500894] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943788, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.529766] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cbf751-30c7-4969-848a-34b75bcef498 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.543770] env[68437]: DEBUG oslo_vmware.api [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Task: {'id': task-2943786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163163} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.548145] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.548501] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 726.548862] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.549095] env[68437]: INFO nova.compute.manager [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Took 1.21 seconds to destroy the instance on the hypervisor. [ 726.549466] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.549814] env[68437]: DEBUG nova.network.neutron [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.554696] env[68437]: DEBUG nova.compute.manager [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 726.554865] env[68437]: DEBUG nova.network.neutron [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 726.580867] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 726.581484] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-571acdf9-74f0-4443-91be-03c09acd9955 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.588620] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 726.588620] env[68437]: value = "task-2943789" [ 726.588620] env[68437]: _type = "Task" [ 726.588620] 
env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.598402] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 726.598632] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.598875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.599032] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.599217] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.599459] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7fbe3bb-fa3e-4514-b635-8f679ddd39b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.610826] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 726.617936] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.617936] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.617936] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6beb834-6046-431b-a2d8-181ba37f8ae1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.624544] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 726.624544] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5230f6a9-5117-ebc0-331c-0cdd67b1e163" [ 726.624544] env[68437]: _type = "Task" [ 726.624544] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.632229] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5230f6a9-5117-ebc0-331c-0cdd67b1e163, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.727062] env[68437]: DEBUG nova.compute.manager [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-vif-plugged-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 726.727295] env[68437]: DEBUG oslo_concurrency.lockutils [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] Acquiring lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.727528] env[68437]: DEBUG oslo_concurrency.lockutils [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.727683] env[68437]: DEBUG oslo_concurrency.lockutils [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.727853] env[68437]: DEBUG nova.compute.manager [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] No waiting events found dispatching network-vif-plugged-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 726.728104] env[68437]: WARNING nova.compute.manager [req-66f7c04b-69ba-4ef1-80fc-a156d6be0e1b req-e18d0f9c-105d-4793-b399-c21def3af3ca service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received unexpected event 
network-vif-plugged-102fc7ce-ac0b-465b-8073-7ba895ea1293 for instance with vm_state building and task_state spawning. [ 726.750415] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943787, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.840884] env[68437]: DEBUG nova.network.neutron [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.859315] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 726.985818] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.985818] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.007089] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943788, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.054791] env[68437]: INFO nova.compute.manager [-] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Took 1.63 seconds to deallocate network for instance. [ 727.141025] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5230f6a9-5117-ebc0-331c-0cdd67b1e163, 'name': SearchDatastore_Task, 'duration_secs': 0.026375} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.144112] env[68437]: DEBUG nova.compute.manager [req-b965b314-c5b8-4b42-a043-81ceb6c0c55e req-4c9bac76-7c94-4f02-a184-0b53d349be6c service nova] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Received event network-vif-deleted-ac2dc22a-b9e3-4855-81b4-4f26c019fc72 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.144301] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d3bdca5-db95-45b5-8dcb-5ac0e8f69c97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.147623] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Successfully created port: b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.154803] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 727.154803] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52791ad1-8829-4ca6-efb7-9b3580f561b1" [ 727.154803] env[68437]: _type = "Task" [ 727.154803] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.162914] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52791ad1-8829-4ca6-efb7-9b3580f561b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.252671] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943787, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.344363] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.344710] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Instance network_info: |[{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 727.345522] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:55:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11da2092-76f7-447e-babb-8fc14ad39a71', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '102fc7ce-ac0b-465b-8073-7ba895ea1293', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.356448] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Creating folder: Project (3a8ec160fb3148c6aa238e70a975496f). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.359601] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-544a371b-9937-45a4-b1f7-bee546256a73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.378895] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Created folder: Project (3a8ec160fb3148c6aa238e70a975496f) in parent group-v590848. [ 727.379372] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Creating folder: Instances. Parent ref: group-v590912. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.379616] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb310199-85f3-4607-8c86-fd6a0cf7df1b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.390155] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Created folder: Instances in parent group-v590912. [ 727.390469] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 727.390696] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.390924] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aa5d08c-1898-48c0-a1f2-5cbe755887a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.418615] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.418615] env[68437]: value = "task-2943796" [ 727.418615] env[68437]: _type = "Task" [ 727.418615] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.429212] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943796, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.499693] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943788, 'name': ReconfigVM_Task, 'duration_secs': 0.658533} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.500032] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfigured VM instance instance-00000008 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 727.501038] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3483adc9-ac8a-423a-8f2b-c2b4834b00da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.506668] env[68437]: DEBUG nova.network.neutron [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.528265] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 727.531609] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a77f906-5f50-4096-b879-0d5059baee76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.551029] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 727.551029] env[68437]: value = "task-2943797" [ 727.551029] env[68437]: _type = "Task" [ 727.551029] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.557594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbb43f5-5699-4f38-908a-54d224d6e28c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.564056] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943797, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.565353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.568528] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ee7160-b1fd-40f8-b0ab-a2c6965df7f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.603126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e916980a-77ae-46fd-a054-5b33f548b865 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.615942] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86becedb-850b-4e63-b1e8-d40cc92d7034 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.633613] env[68437]: DEBUG nova.compute.provider_tree [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.665864] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52791ad1-8829-4ca6-efb7-9b3580f561b1, 'name': SearchDatastore_Task, 'duration_secs': 0.019222} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.666198] env[68437]: DEBUG oslo_concurrency.lockutils [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.666498] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. 
{{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 727.667256] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f98616c-4d04-4712-a551-d351269696a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.675084] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 727.675084] env[68437]: value = "task-2943798" [ 727.675084] env[68437]: _type = "Task" [ 727.675084] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.688628] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.752184] env[68437]: DEBUG oslo_vmware.api [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943787, 'name': PowerOnVM_Task, 'duration_secs': 1.161916} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.752960] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.753264] env[68437]: INFO nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Took 8.26 seconds to spawn the instance on the hypervisor. 
[ 727.753550] env[68437]: DEBUG nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.754378] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac488997-a129-4c18-bf45-664d6178c132 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.790773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.791076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.791426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.791733] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.791900] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.794318] env[68437]: INFO nova.compute.manager [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Terminating instance [ 727.868619] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 727.908733] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 727.909241] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.910491] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 727.910491] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.910491] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 727.910491] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 727.910491] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 727.910838] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 727.910838] env[68437]: DEBUG nova.virt.hardware [None 
req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 727.911159] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 727.911385] env[68437]: DEBUG nova.virt.hardware [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 727.912549] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2e83b0-e3c6-4076-8a9a-cdbe55a61f07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.928943] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5944e403-d6d6-4504-b12a-36ca0d2267a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.947798] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943796, 'name': CreateVM_Task, 'duration_secs': 0.410354} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.948094] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 727.948936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.949156] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.949525] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 727.949943] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f522b83d-1686-4a26-80cf-7d36c1a57ce8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.955983] env[68437]: DEBUG 
oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 727.955983] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3b359-4580-e162-b9ef-e4b83d18b894" [ 727.955983] env[68437]: _type = "Task" [ 727.955983] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.967438] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3b359-4580-e162-b9ef-e4b83d18b894, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.010391] env[68437]: INFO nova.compute.manager [-] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Took 1.46 seconds to deallocate network for instance. [ 728.064653] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943797, 'name': ReconfigVM_Task, 'duration_secs': 0.452771} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.065090] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4/1186da93-57aa-40f4-8aae-702d039844d4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.065331] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 728.112193] env[68437]: INFO nova.compute.manager [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Rebuilding instance [ 728.138664] env[68437]: DEBUG nova.scheduler.client.report [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.182023] env[68437]: DEBUG nova.compute.manager [None 
req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.183507] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da7d9af-943e-46d0-93f2-f0b25fc7df4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.194330] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943798, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.276350] env[68437]: INFO nova.compute.manager [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Took 29.41 seconds to build instance. [ 728.298306] env[68437]: DEBUG nova.compute.manager [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.298544] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.301769] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e88cc6-e156-4ae1-a93c-cf192a9ca439 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.308367] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.308627] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90c9967b-868a-43e4-997b-35b1586099ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.316437] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 728.316437] env[68437]: value = "task-2943799" [ 728.316437] env[68437]: _type = "Task" [ 728.316437] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.325527] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.471592] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3b359-4580-e162-b9ef-e4b83d18b894, 'name': SearchDatastore_Task, 'duration_secs': 0.038581} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.471592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.471592] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.471592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.471793] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.471793] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.471917] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-100564c7-8c6a-43ca-9e81-77f8d2f89cff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.480784] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.481025] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 728.481700] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c8b6c24-f54c-426d-9494-b626deeb94ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.487092] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 728.487092] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5295b7d7-4d5a-14f7-81f9-6389f083dd5f" [ 728.487092] env[68437]: _type = "Task" [ 728.487092] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.495044] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5295b7d7-4d5a-14f7-81f9-6389f083dd5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.520523] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.574592] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812c81c2-9396-4e6c-928b-81ec2db757e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.599841] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb68ab9-10e1-4c98-b509-9c79f6d603fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.621666] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 728.650677] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.650677] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 728.653129] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.944s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.653458] env[68437]: DEBUG nova.objects.instance [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lazy-loading 'resources' on Instance uuid ce8fd88b-249b-4fee-80fc-35b795d24658 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.687375] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522413} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.687668] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. 
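The wait_for_task / _poll_task DEBUG lines above trace oslo.vmware's task-polling loop: the driver kicks off a vCenter task (PowerOffVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ...), then repeatedly re-reads the task's progress ("progress is 0%") until it reports completion ("completed successfully", with a duration_secs). The sketch below is only a rough, self-contained illustration of that poll-until-done pattern, not the actual oslo.vmware code; TaskInfo, the wait_for_task stand-in, and the fake task iterator are invented for illustration.

    # Illustrative poll-until-done loop (assumed names, not oslo.vmware itself).
    import itertools
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        """Invented stand-in for the vSphere TaskInfo a poller would inspect."""
        state: str            # 'running', 'success' or 'error'
        progress: int         # percent complete
        error: str | None = None

    def wait_for_task(poll, interval=0.5):
        """Poll `poll()` until the task leaves the 'running' state, printing
        progress lines analogous to the _poll_task DEBUG output above."""
        for attempt in itertools.count(1):
            info = poll()
            if info.state == "running":
                print(f"Task progress is {info.progress}%.")
                time.sleep(interval)
                continue
            if info.state == "success":
                print("Task completed successfully.")
                return info
            raise RuntimeError(f"Task failed after {attempt} polls: {info.error}")

    # Fake task that needs three polls to finish, loosely mimicking a
    # PowerOffVM_Task that goes 0% -> 50% -> done.
    _states = iter([TaskInfo("running", 0),
                    TaskInfo("running", 50),
                    TaskInfo("success", 100)])

    if __name__ == "__main__":
        wait_for_task(lambda: next(_states), interval=0.1)

In the real log the same pattern repeats for every vCenter task ID (task-2943799, task-2943800, ...), with the final _poll_task line reporting the measured duration_secs once the task completes.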
[ 728.688506] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625cdb5f-d878-402f-9195-1739fe428545 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.719505] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.720776] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.721332] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af3c3885-9feb-42be-9757-786719737fc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.734566] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c11168e-473e-4bb2-8a5f-ec3cb9bccd63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.741675] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 728.741675] env[68437]: value = "task-2943800" [ 728.741675] env[68437]: _type = "Task" [ 728.741675] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.742879] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 728.742879] env[68437]: value = "task-2943801" [ 728.742879] env[68437]: _type = "Task" [ 728.742879] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.754327] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943800, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.757881] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943801, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.780082] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d0d776f6-71cf-4d2f-a759-18d017147ba3 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.797s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.827865] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943799, 'name': PowerOffVM_Task, 'duration_secs': 0.239836} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.828191] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 728.828400] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 728.828758] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9244002d-85db-410f-9263-04878e9a58e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.897572] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 728.899620] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 728.899620] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore1] f517b14c-320f-4a6e-ae74-f2335e22f7a4 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.899620] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc3cc07c-92b2-4e07-8d38-ead521307be7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.906608] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 
tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 728.906608] env[68437]: value = "task-2943803" [ 728.906608] env[68437]: _type = "Task" [ 728.906608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.914274] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943803, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.943132] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "860107df-4e9b-44b1-9e85-b0ee3a827268" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.943132] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.999182] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5295b7d7-4d5a-14f7-81f9-6389f083dd5f, 'name': SearchDatastore_Task, 'duration_secs': 0.012237} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.000064] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41c54be4-7fae-4bfb-a124-7a92ccceb3cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.011081] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 729.011081] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522c69ea-38ab-c49a-22b8-bba93a9025f8" [ 729.011081] env[68437]: _type = "Task" [ 729.011081] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.017738] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522c69ea-38ab-c49a-22b8-bba93a9025f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.159214] env[68437]: DEBUG nova.compute.utils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 729.166690] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 729.167169] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 729.205297] env[68437]: DEBUG nova.compute.manager [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.205366] env[68437]: DEBUG nova.compute.manager [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing instance network info cache due to event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 729.205759] env[68437]: DEBUG oslo_concurrency.lockutils [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.205988] env[68437]: DEBUG oslo_concurrency.lockutils [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.206222] env[68437]: DEBUG nova.network.neutron [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 729.210424] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Successfully updated port: b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.266954] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943800, 'name': ReconfigVM_Task, 'duration_secs': 0.369214} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.276538] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Reconfigured VM instance instance-00000012 to attach disk [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.277771] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943801, 'name': PowerOffVM_Task, 'duration_secs': 0.110232} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.278129] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0729de-55d4-4d36-a869-973aacbd554d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.280826] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.281457] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.282194] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6fac4f-bced-4b09-9e2c-1cff362db637 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.285984] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 729.314250] env[68437]: DEBUG nova.network.neutron [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Port 4fd952c0-7921-4632-b5de-2fe90c4bba05 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 729.318968] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71b54350-c65a-4457-a7bf-17952b349280 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.329921] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.333155] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6af55412-96f8-4c84-a094-35f915dc4082 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.339048] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 729.339048] env[68437]: value = "task-2943804" [ 729.339048] env[68437]: _type = "Task" [ 729.339048] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.343952] env[68437]: DEBUG nova.policy [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b57f48bbefbe436fb84cf86752a7cfab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03cd64940cc64e7baceabbc7983889df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 729.358142] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943804, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.359227] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.359435] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.359616] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Deleting the datastore file [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.360050] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3802940f-7adc-4697-8d20-6cfef56db044 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.368801] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 729.368801] env[68437]: value = "task-2943806" [ 729.368801] env[68437]: _type = "Task" [ 729.368801] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.380120] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943806, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.417709] env[68437]: DEBUG oslo_vmware.api [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2943803, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366671} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.417709] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.417709] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.418019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.418019] env[68437]: INFO nova.compute.manager [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 729.418263] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.418736] env[68437]: DEBUG nova.compute.manager [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.418736] env[68437]: DEBUG nova.network.neutron [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 729.446243] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.446540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.447486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.447486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.447486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.452510] env[68437]: INFO nova.compute.manager [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Terminating instance [ 729.525998] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522c69ea-38ab-c49a-22b8-bba93a9025f8, 'name': SearchDatastore_Task, 'duration_secs': 
0.019025} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.526141] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.526410] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 07d98c5c-ede8-4001-93b2-1b1d83687ca1/07d98c5c-ede8-4001-93b2-1b1d83687ca1.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 729.526670] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe3d3b33-384c-497e-a939-cd81af39914d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.533415] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 729.533415] env[68437]: value = "task-2943807" [ 729.533415] env[68437]: _type = "Task" [ 729.533415] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.544816] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943807, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.597384] env[68437]: DEBUG nova.compute.manager [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Received event network-vif-plugged-b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 729.597384] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] Acquiring lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.597384] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.597384] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.597384] env[68437]: DEBUG nova.compute.manager [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] No waiting events found dispatching network-vif-plugged-b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.597706] env[68437]: WARNING nova.compute.manager [req-bf8f2a39-8579-45cb-b24b-f6e4df6502ce req-1dc91ce0-87c1-4d2b-b8fa-7f643e50f00a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Received unexpected event network-vif-plugged-b1d3a732-f87f-4b26-a261-f7dccc5912ac for instance with vm_state building and task_state spawning. [ 729.664290] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 729.714136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.714283] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.714785] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 729.809691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.852941] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943804, 'name': ReconfigVM_Task, 'duration_secs': 0.161161} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.853235] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.853586] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50d5d7e9-01d3-442f-8b32-371e8908296d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.861954] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 729.861954] env[68437]: value = "task-2943808" [ 729.861954] env[68437]: _type = "Task" [ 729.861954] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.886990] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.891498] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264044} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.891498] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.891903] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.892122] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.964839] env[68437]: DEBUG nova.compute.manager [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 729.964839] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 729.972077] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b239dd-1b84-4efa-a63c-aa1c8ea98e46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.985740] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.985740] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89129c96-27ef-46d4-8cf6-0acd16eed3fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.996128] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e66bfc-d980-482c-8b9a-c6fe48980e94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.008902] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03569f07-b6d5-4459-bb21-4148b9efe6d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.048510] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8195a32-0fa8-4231-8d25-b89ca9c47878 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.059018] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943807, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.064387] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 730.064744] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 730.065018] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleting the datastore file [datastore1] a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 730.068870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c571cb-b933-40ca-9a11-d8f2d5a77d85 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.073041] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a30fc386-1fdc-4629-bcfb-830838b5f1f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.089026] env[68437]: DEBUG nova.compute.provider_tree [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.089848] env[68437]: DEBUG oslo_vmware.api [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 730.089848] env[68437]: value = "task-2943811" [ 730.089848] env[68437]: _type = "Task" [ 730.089848] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.101521] env[68437]: DEBUG oslo_vmware.api [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.308587] env[68437]: DEBUG nova.network.neutron [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updated VIF entry in instance network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 730.309937] env[68437]: DEBUG nova.network.neutron [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.319900] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 730.344088] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.344718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.344978] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.380496] env[68437]: DEBUG oslo_vmware.api [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943808, 'name': PowerOnVM_Task, 'duration_secs': 0.512848} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.381855] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.384921] env[68437]: DEBUG nova.compute.manager [None req-92dc9da5-ca2b-4cd0-b6d9-d2ef8bbb652b tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.385953] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bc0ba8-57c3-40d9-9b0b-58b04ff1b4d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.431606] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Successfully created port: cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.557766] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943807, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.760086} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.558046] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 07d98c5c-ede8-4001-93b2-1b1d83687ca1/07d98c5c-ede8-4001-93b2-1b1d83687ca1.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 730.558617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 730.559031] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d2086a7-f90b-4244-b427-757eabd9498b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.567934] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 730.567934] env[68437]: value = "task-2943812" [ 730.567934] env[68437]: _type = "Task" [ 730.567934] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.577715] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943812, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.596649] env[68437]: DEBUG nova.scheduler.client.report [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.612228] env[68437]: DEBUG oslo_vmware.api [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.494096} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.612496] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 730.614444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 730.614444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 730.614444] env[68437]: INFO nova.compute.manager [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Took 0.65 seconds to destroy the instance on the hypervisor. [ 730.614444] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.614444] env[68437]: DEBUG nova.compute.manager [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 730.615410] env[68437]: DEBUG nova.network.neutron [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 730.682351] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 730.716826] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:38:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='417506587',id=26,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1853946616',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.717192] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.717504] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.717818] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.718076] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.718329] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.718648] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 730.719879] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 
tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 730.720165] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.720427] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.720696] env[68437]: DEBUG nova.virt.hardware [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.721964] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9881b2-03cc-437f-9991-c02455c14665 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.725529] env[68437]: DEBUG nova.network.neutron [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updating instance_info_cache with network_info: [{"id": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "address": "fa:16:3e:8b:b5:cc", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d3a732-f8", "ovs_interfaceid": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.731996] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504e378f-9d48-445a-b1b6-90f9a43a7fca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.813761] env[68437]: DEBUG oslo_concurrency.lockutils [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] Releasing lock 
"refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.814063] env[68437]: DEBUG nova.compute.manager [req-09fbeaa8-5858-48a8-a278-f44a64d4dbeb req-9d635be2-be7a-4ac4-b7e7-6d7888ff4929 service nova] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Received event network-vif-deleted-bd37d751-cb4d-4517-b9be-bfa192d0fff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 730.942444] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.945493] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.945493] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.945493] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.945493] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.945493] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.946028] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 730.946028] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 730.946028] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.946028] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.946028] env[68437]: DEBUG nova.virt.hardware [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.947918] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0977e5b0-2f56-4e4e-81c3-3d66e6bdfdf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.959123] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e620330-a817-4c82-b2d9-9dc37f0990d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.976500] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.982615] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.983147] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.985967] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14392c2b-4d86-4bc5-93e3-dcb1bd0d358e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.003876] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.003876] env[68437]: value = "task-2943813" [ 731.003876] env[68437]: _type = "Task" [ 731.003876] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.013462] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943813, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.078657] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080099} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.078657] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.079382] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f6e80c-126b-4cb7-b5b0-93d015ef4e56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.106716] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 07d98c5c-ede8-4001-93b2-1b1d83687ca1/07d98c5c-ede8-4001-93b2-1b1d83687ca1.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.107200] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.454s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.110053] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-072cdac0-d359-4f6a-9262-d71de1681d2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.124600] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.544s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.126702] env[68437]: INFO nova.compute.claims [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.136678] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 
tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 731.136678] env[68437]: value = "task-2943814" [ 731.136678] env[68437]: _type = "Task" [ 731.136678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.146060] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943814, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.156267] env[68437]: INFO nova.scheduler.client.report [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Deleted allocations for instance ce8fd88b-249b-4fee-80fc-35b795d24658 [ 731.162849] env[68437]: DEBUG nova.network.neutron [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.228414] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.228749] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Instance network_info: |[{"id": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "address": "fa:16:3e:8b:b5:cc", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d3a732-f8", "ovs_interfaceid": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 731.229237] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:b5:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1d3a732-f87f-4b26-a261-f7dccc5912ac', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 731.239422] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating folder: Project (0ae9811689c645a7af2096a600ed6e1e). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 731.241279] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18221829-1f42-41fe-914b-f9a7c1685c20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.253987] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created folder: Project (0ae9811689c645a7af2096a600ed6e1e) in parent group-v590848. [ 731.254254] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating folder: Instances. Parent ref: group-v590916. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 731.254594] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fe4aef8-d1a4-4f38-b912-8ab5621bb0fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.265829] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created folder: Instances in parent group-v590916. [ 731.266518] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 731.266518] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 731.266650] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68b8d991-c874-449d-a531-7af2dce9275d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.292668] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.292668] env[68437]: value = "task-2943817" [ 731.292668] env[68437]: _type = "Task" [ 731.292668] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.301881] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943817, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.446904] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.447192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.447282] env[68437]: DEBUG nova.network.neutron [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 731.516644] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943813, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.566968] env[68437]: DEBUG nova.network.neutron [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.611623] env[68437]: DEBUG nova.compute.manager [req-2722ce0e-ae99-491d-9dea-5c021eb8616c req-4944083d-a9d1-40a0-a6f0-22d8874df7e6 service nova] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Received event network-vif-deleted-5b158664-3a56-450a-8a96-2e42835511e3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 731.652347] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943814, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.666923] env[68437]: INFO nova.compute.manager [-] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Took 2.25 seconds to deallocate network for instance. [ 731.666923] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d4066c30-aebf-491d-a8b7-ccbecdcae22d tempest-InstanceActionsTestJSON-1237950300 tempest-InstanceActionsTestJSON-1237950300-project-member] Lock "ce8fd88b-249b-4fee-80fc-35b795d24658" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 29.537s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.807591] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943817, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.016129] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943813, 'name': CreateVM_Task, 'duration_secs': 0.536893} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.018946] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.020293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.020505] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.020863] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.021646] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ec996e4-4e0b-4616-a0c6-dc47df086c2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.026704] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 732.026704] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52efb6e1-9c8f-b15c-03f7-cbdfd586684f" [ 732.026704] env[68437]: _type = "Task" [ 732.026704] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.035897] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52efb6e1-9c8f-b15c-03f7-cbdfd586684f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.069586] env[68437]: INFO nova.compute.manager [-] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Took 1.46 seconds to deallocate network for instance. [ 732.154610] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943814, 'name': ReconfigVM_Task, 'duration_secs': 0.680111} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.155011] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 07d98c5c-ede8-4001-93b2-1b1d83687ca1/07d98c5c-ede8-4001-93b2-1b1d83687ca1.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.155650] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4accee1-dfa9-4d10-85f8-30fdd7d5af5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.162116] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 732.162116] env[68437]: value = "task-2943819" [ 732.162116] env[68437]: _type = "Task" [ 732.162116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.174463] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.174798] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943819, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.254805] env[68437]: DEBUG nova.compute.manager [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Received event network-changed-b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 732.255085] env[68437]: DEBUG nova.compute.manager [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Refreshing instance network info cache due to event network-changed-b1d3a732-f87f-4b26-a261-f7dccc5912ac. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 732.256120] env[68437]: DEBUG oslo_concurrency.lockutils [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] Acquiring lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.256120] env[68437]: DEBUG oslo_concurrency.lockutils [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] Acquired lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.256457] env[68437]: DEBUG nova.network.neutron [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Refreshing network info cache for port b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 732.306903] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943817, 'name': CreateVM_Task, 'duration_secs': 0.526079} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.306903] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.307287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.322806] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Successfully updated port: cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.387098] env[68437]: DEBUG nova.network.neutron [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.540628] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52efb6e1-9c8f-b15c-03f7-cbdfd586684f, 'name': SearchDatastore_Task, 'duration_secs': 0.021086} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.541995] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.541995] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.541995] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.541995] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.542229] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.542229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.543462] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.543462] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-108db625-22d2-44d7-adba-a4ef958379fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.546104] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40475a74-5899-4eac-86fc-d3edea4e7d87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.555903] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "995a3eae-c025-4efa-b509-0bf678bb0388" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.555903] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.558753] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 732.558753] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5208324a-0b6e-2e5c-f596-2369508ccbd2" [ 732.558753] env[68437]: _type = "Task" [ 732.558753] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.558753] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.559145] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.562700] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cf89a41-7587-458e-9726-675d70924ea1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.572806] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5208324a-0b6e-2e5c-f596-2369508ccbd2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.575187] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 732.575187] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5230b320-02aa-919f-efa3-db01d0025280" [ 732.575187] env[68437]: _type = "Task" [ 732.575187] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.582707] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.587871] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5230b320-02aa-919f-efa3-db01d0025280, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.673448] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.674072] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943819, 'name': Rename_Task, 'duration_secs': 0.141426} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.676659] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.678459] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 732.678940] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4b80578-bd35-4d5c-82d6-732deecf3e3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.685892] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 732.685892] env[68437]: value = "task-2943820" [ 732.685892] env[68437]: _type = "Task" [ 732.685892] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.694772] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.759651] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b55333-37c4-41eb-a3e0-b62eba7c9d66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.770494] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a9f60d-28c7-4940-8cb3-b756d43017a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.805503] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ad874f-c1a8-4681-9fdd-8ec179ec692d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.815703] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664eac83-6f01-4699-a1e9-548e340c8eb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.830682] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.830841] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.831316] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 732.832395] env[68437]: DEBUG nova.compute.provider_tree [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.840457] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.840682] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.889326] env[68437]: DEBUG oslo_concurrency.lockutils [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.068813] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5208324a-0b6e-2e5c-f596-2369508ccbd2, 'name': SearchDatastore_Task, 'duration_secs': 0.014775} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.069131] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.069368] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.069571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.085255] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5230b320-02aa-919f-efa3-db01d0025280, 'name': SearchDatastore_Task, 'duration_secs': 0.016133} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.086342] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229aeaa5-1087-48b9-8a06-56405236186f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.092542] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 733.092542] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f5f232-fd94-4ac7-7fde-ebcb004cee1a" [ 733.092542] env[68437]: _type = "Task" [ 733.092542] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.100725] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f5f232-fd94-4ac7-7fde-ebcb004cee1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.194923] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.194923] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.194923] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.194923] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.194923] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.195674] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.195674] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 733.195674] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 733.199330] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943820, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.339453] env[68437]: DEBUG nova.scheduler.client.report [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.389412] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 733.432641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca65e1b3-40ec-4098-8f67-18cdc409bcc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.455212] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ceee8f7-6322-4dba-a8cf-29211818f3fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.462751] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 733.515313] env[68437]: DEBUG nova.network.neutron [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updated VIF entry in instance network info cache for port b1d3a732-f87f-4b26-a261-f7dccc5912ac. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 733.515667] env[68437]: DEBUG nova.network.neutron [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updating instance_info_cache with network_info: [{"id": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "address": "fa:16:3e:8b:b5:cc", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d3a732-f8", "ovs_interfaceid": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.598861] env[68437]: DEBUG nova.network.neutron [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updating instance_info_cache with network_info: [{"id": "cc91b233-efdf-4cb6-9817-3f48a59237be", "address": "fa:16:3e:35:da:52", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc91b233-ef", "ovs_interfaceid": "cc91b233-efdf-4cb6-9817-3f48a59237be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.606031] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f5f232-fd94-4ac7-7fde-ebcb004cee1a, 'name': SearchDatastore_Task, 'duration_secs': 0.009938} completed successfully. 
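The instance_info_cache payloads above are lists of VIF dicts. A small sketch of pulling the commonly needed fields (port ID, MAC, MTU, fixed IPs) out of one entry; the dict below is a trimmed excerpt of the cc91b233-... cache entry logged above, with fields not needed for the example omitted:

```python
# Trimmed excerpt of one VIF entry from the instance_info_cache above.
vif = {
    "id": "cc91b233-efdf-4cb6-9817-3f48a59237be",
    "address": "fa:16:3e:35:da:52",
    "type": "ovs",
    "devname": "tapcc91b233-ef",
    "network": {
        "id": "4c17c5a2-048e-4021-8471-3f6519d65387",
        "bridge": "br-int",
        "meta": {"mtu": 8950},
        "subnets": [
            {
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1", "version": 4},
                "ips": [{"address": "192.168.128.10", "type": "fixed"}],
            }
        ],
    },
}

port_id = vif["id"]
mac = vif["address"]
mtu = vif["network"]["meta"]["mtu"]
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]

print(port_id, mac, mtu, fixed_ips)
# cc91b233-efdf-4cb6-9817-3f48a59237be fa:16:3e:35:da:52 8950 ['192.168.128.10']
```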
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.606318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.606566] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.606836] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.607052] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.607293] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c3bb077-9cc0-453d-8367-042e33e04b2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.609324] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce7291af-044f-44fd-96a0-eaff49247a27 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.617352] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 733.617352] env[68437]: value = "task-2943821" [ 733.617352] env[68437]: _type = "Task" [ 733.617352] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.620947] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.621154] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Folder [datastore1] devstack-image-cache_base created. 
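The copy above also shows the datastore layout the driver relies on: the image is cached once per datastore under devstack-image-cache_base/<image-id>/ and then copied into a per-instance folder named after the instance UUID. A short sketch reconstructing both paths from the identifiers in the log; the path format is taken verbatim from the entries above:

```python
# Datastore path convention visible in the CopyVirtualDisk entries above:
# one cached VMDK per image per datastore, one folder per instance.
datastore = 'datastore1'
image_id = 'a272f526-6b8d-4a29-bd06-cd29ab5fabbe'
instance_uuid = '5435b4d8-46c3-43e3-b11b-cbeb580e2f36'

cached_vmdk = f'[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk'
instance_vmdk = f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'

print(cached_vmdk)
print(instance_vmdk)
```

Because several builds share the same cached source VMDK, the requests in this log keep taking and releasing a lock on that cached path before copying from it.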
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 733.622267] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee98f83d-bbb5-4535-b9ef-208ac8090420 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.627810] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.630728] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 733.630728] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b9a5db-0e17-42c0-1da4-2663d4089ccf" [ 733.630728] env[68437]: _type = "Task" [ 733.630728] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.638586] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b9a5db-0e17-42c0-1da4-2663d4089ccf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.698647] env[68437]: DEBUG oslo_vmware.api [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2943820, 'name': PowerOnVM_Task, 'duration_secs': 0.61701} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.698917] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 733.699145] env[68437]: INFO nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Took 8.63 seconds to spawn the instance on the hypervisor. 
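Most vCenter interactions above share one shape: invoke a *_Task method, receive a task reference, then poll it until vCenter reports success, which is what the 'Waiting for the task ... progress is N% ... completed successfully' lines trace. A minimal sketch of that pattern with oslo.vmware, not Nova's driver code; the connection values are placeholders and the choice of PowerOnVM_Task is only an example:

```python
# Sketch of the start-task-then-poll pattern reflected in the
# "Waiting for the task ..." entries above, using oslo.vmware.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'password',   # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)

# Fetch some VM's managed-object reference via the property collector
# (the PropertyCollector.RetrievePropertiesEx calls seen throughout the log).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 1, ['name'])
vm_ref = result.objects[0].obj   # assumes at least one VM exists

# Kick off an asynchronous vCenter task ...
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# ... and block until it finishes; oslo.vmware polls the task object
# (the _poll_task progress lines) and raises if it ends in an error state.
task_info = session.wait_for_task(task)
```

wait_for_task() only returns on success, so the "completed successfully" lines mark the point where the caller gets control back with the task result.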
[ 733.699331] env[68437]: DEBUG nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 733.700137] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230a1e7d-6dcc-4be2-8f0b-b48def58d361 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.703210] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.845593] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.721s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.846243] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.848646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.274s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.848863] env[68437]: DEBUG nova.objects.instance [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lazy-loading 'resources' on Instance uuid 1537e626-f2ec-4b5d-bcba-50cd583dff31 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 733.970994] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.971338] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbe208b9-e1e0-4e74-891d-38836e8c190f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.980576] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 733.980576] env[68437]: value = "task-2943823" [ 733.980576] env[68437]: _type = "Task" [ 733.980576] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.994601] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943823, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.012397] env[68437]: DEBUG nova.compute.manager [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Received event network-vif-plugged-cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 734.012397] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Acquiring lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.012397] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.012397] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.012397] env[68437]: DEBUG nova.compute.manager [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] No waiting events found dispatching network-vif-plugged-cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 734.012580] env[68437]: WARNING nova.compute.manager [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Received unexpected event network-vif-plugged-cc91b233-efdf-4cb6-9817-3f48a59237be for instance with vm_state building and task_state spawning. [ 734.012580] env[68437]: DEBUG nova.compute.manager [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Received event network-changed-cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 734.013440] env[68437]: DEBUG nova.compute.manager [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Refreshing instance network info cache due to event network-changed-cc91b233-efdf-4cb6-9817-3f48a59237be. 
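The external events above are keyed as '<event-name>-<tag>', with the Neutron port ID as the tag; when no waiter has registered for that key, the compute manager logs the 'Received unexpected event ...' warning instead of dispatching it. A tiny illustration of that keying and lookup, with hypothetical variable names rather than Nova's InstanceEvents implementation:

```python
# Illustration of the "<event-name>-<tag>" keying used in the entries above.
port_id = 'cc91b233-efdf-4cb6-9817-3f48a59237be'
event_key = f'network-vif-plugged-{port_id}'

waiting_for = set()   # nothing registered -> "No waiting events found"
if event_key in waiting_for:
    print('dispatching event to its waiter')
else:
    print(f'unexpected event {event_key}; instance is still building/spawning')
```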
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 734.013813] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Acquiring lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.018836] env[68437]: DEBUG oslo_concurrency.lockutils [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] Releasing lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.020322] env[68437]: DEBUG nova.compute.manager [req-e4dea827-dee0-4c67-841a-948896873b16 req-2e5ba9dd-a1fa-4702-8e52-b72f5a0133eb service nova] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Received event network-vif-deleted-ad41ce28-cc67-4b33-b2d2-257330d41543 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 734.104024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.104024] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Instance network_info: |[{"id": "cc91b233-efdf-4cb6-9817-3f48a59237be", "address": "fa:16:3e:35:da:52", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc91b233-ef", "ovs_interfaceid": "cc91b233-efdf-4cb6-9817-3f48a59237be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 734.104391] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Acquired lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.104391] env[68437]: DEBUG nova.network.neutron [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] 
[instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Refreshing network info cache for port cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 734.104391] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:da:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc91b233-efdf-4cb6-9817-3f48a59237be', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.113592] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Creating folder: Project (03cd64940cc64e7baceabbc7983889df). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.114483] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8c558a8-8a58-4db1-a9e6-ef6a625bba45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.128900] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943821, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.131240] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Created folder: Project (03cd64940cc64e7baceabbc7983889df) in parent group-v590848. [ 734.132017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Creating folder: Instances. Parent ref: group-v590919. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.132184] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84121e0d-db74-4035-9a06-fc7dfaaa365d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.146711] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b9a5db-0e17-42c0-1da4-2663d4089ccf, 'name': SearchDatastore_Task, 'duration_secs': 0.015836} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.149545] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Created folder: Instances in parent group-v590919. [ 734.149545] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 734.150103] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-039c0dbe-5490-49fa-85ae-2979b2ebe58a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.154060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.158096] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21518e8f-c31d-449b-a034-29d7df5fc9d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.178171] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 734.178171] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5291f0ad-c078-6caf-6101-2fe2d5ef803c" [ 734.178171] env[68437]: _type = "Task" [ 734.178171] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.181511] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.181511] env[68437]: value = "task-2943826" [ 734.181511] env[68437]: _type = "Task" [ 734.181511] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.188971] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5291f0ad-c078-6caf-6101-2fe2d5ef803c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.197545] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943826, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.220597] env[68437]: INFO nova.compute.manager [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Took 32.82 seconds to build instance. 
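The 'Acquiring lock ... acquired ... released ... held N.NNNs' bookkeeping throughout this log is oslo.concurrency's lockutils instrumentation. A minimal sketch of the two usual forms, the context manager and the decorator; the lock names below are illustrative:

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... acquired ... released ... held N.NNNs" lines above.
from oslo_concurrency import lockutils

# Context-manager form: serialize access to a shared structure,
# e.g. a per-host resource view.
with lockutils.lock('compute_resources'):
    pass  # claim/update resources here

# Decorator form: every call takes the named lock first. Real code builds
# names like "refresh_cache-<instance-uuid>" from the instance UUID.
@lockutils.synchronized('refresh_cache-demo')
def refresh_instance_cache():
    pass  # rebuild the instance network info cache here

refresh_instance_cache()
```

The waited/held durations in the log come from that same wrapper, which is why long hold times (such as the 41.632s build lock above) are visible without any extra instrumentation.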
[ 734.354428] env[68437]: DEBUG nova.compute.utils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 734.361248] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 734.361736] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 734.449640] env[68437]: DEBUG nova.policy [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59b6e538d77d441e852466b24b70e0a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e56fa6cd94413d82963b143143f519', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 734.494314] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943823, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.631737] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.771436} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.634899] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.634899] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.634899] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa162e20-3c5b-466b-b67e-ba4cd53586ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.642462] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 734.642462] env[68437]: value = "task-2943827" [ 734.642462] env[68437]: _type = "Task" [ 734.642462] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.656578] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943827, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.690243] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5291f0ad-c078-6caf-6101-2fe2d5ef803c, 'name': SearchDatastore_Task, 'duration_secs': 0.063094} completed successfully. 
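The 'Extending root virtual disk to 1048576' step reads naturally as a size in KB: 1048576 KB is exactly 1 GiB, matching a 1 GB root disk such as the m1.nano flavor (root_gb=1) dumped later in this log. A two-line check of that arithmetic, with the KB interpretation treated as an assumption:

```python
# Assumed: the extend target logged above is expressed in kilobytes.
root_gb = 1                                # e.g. m1.nano's root_gb
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576          # matches the value logged above
```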
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.693290] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.693290] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/ad773afa-fc0a-4380-901d-af013ce55f2b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 734.694536] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac51fdd4-a5f5-4351-962b-b25b3f5c779c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.702974] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943826, 'name': CreateVM_Task, 'duration_secs': 0.391938} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.705839] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.707066] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.707116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.707474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 734.711518] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff0a7d4a-30af-44b1-9cb4-9efc8a8f2ae5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.713531] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 
tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 734.713531] env[68437]: value = "task-2943828" [ 734.713531] env[68437]: _type = "Task" [ 734.713531] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.719388] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 734.719388] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff6127-d97d-b961-8f89-5b87168b2543" [ 734.719388] env[68437]: _type = "Task" [ 734.719388] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.728303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e92f1c3-07d7-4529-aa64-6cbf38d5b872 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.632s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.728303] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943828, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.735858] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff6127-d97d-b961-8f89-5b87168b2543, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.857807] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 734.972749] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Successfully created port: d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.990558] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95973ae5-c7ef-412a-8ac8-70c527018e3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.001982] env[68437]: DEBUG oslo_vmware.api [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2943823, 'name': PowerOnVM_Task, 'duration_secs': 0.585194} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.003998] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.004205] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-06e5ac2c-8988-4380-b51b-cd4a5f51b7d2 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance '1186da93-57aa-40f4-8aae-702d039844d4' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 735.008749] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454f63d4-aa57-4a1e-a4ab-76abac125f32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.047275] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468942ef-edce-428d-8b05-95ff19db720d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.058310] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e60a46-dad6-4be1-a912-82e6f3010b25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.075701] env[68437]: DEBUG nova.compute.provider_tree [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.153131] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064908} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.153257] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.154115] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7092576-7c80-426f-838a-4b8aec46844f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.174292] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.174644] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9354e6e0-bbb5-4a6d-8708-81594fc808ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.191324] env[68437]: DEBUG nova.network.neutron [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updated VIF entry in instance network info cache for port cc91b233-efdf-4cb6-9817-3f48a59237be. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 735.191799] env[68437]: DEBUG nova.network.neutron [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updating instance_info_cache with network_info: [{"id": "cc91b233-efdf-4cb6-9817-3f48a59237be", "address": "fa:16:3e:35:da:52", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc91b233-ef", "ovs_interfaceid": "cc91b233-efdf-4cb6-9817-3f48a59237be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.198145] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 735.198145] env[68437]: value = "task-2943829" [ 735.198145] env[68437]: _type = "Task" [ 735.198145] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.207496] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943829, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.223350] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943828, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.233789] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 735.239462] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ff6127-d97d-b961-8f89-5b87168b2543, 'name': SearchDatastore_Task, 'duration_secs': 0.023844} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.239462] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.239735] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.240040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.240508] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.240508] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.240843] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d5c96d4-ebf3-4cf6-9a68-8c8f091e92f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.250110] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.251219] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.254960] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76e5f2b7-b2c2-479b-bbd7-3f16eb88257b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.260900] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 735.260900] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529cf20e-6d10-f68e-975a-bdcf51e92c5e" [ 735.260900] env[68437]: _type = "Task" [ 735.260900] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.271341] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529cf20e-6d10-f68e-975a-bdcf51e92c5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.374839] env[68437]: INFO nova.compute.manager [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Rescuing [ 735.375275] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.375447] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.375671] env[68437]: DEBUG nova.network.neutron [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 735.581244] env[68437]: DEBUG nova.scheduler.client.report [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.694340] env[68437]: DEBUG oslo_concurrency.lockutils [req-18fb1aaa-9eb1-4cea-84a3-a19ea6745e20 req-4c3e9b11-961c-4ac1-a7ad-874b07c8506f service nova] Releasing lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.709263] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943829, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.724017] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.929595} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.724017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/ad773afa-fc0a-4380-901d-af013ce55f2b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 735.724240] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.724435] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dee0227-1884-4257-b277-289dd155ac49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.730741] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 735.730741] env[68437]: value = "task-2943830" [ 735.730741] env[68437]: _type = "Task" [ 735.730741] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.739932] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943830, 'name': ExtendVirtualDisk_Task} progress is 0%. 
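The inventory reported above for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 encodes per-resource-class capacity. Assuming Placement's usual capacity formula, (total - reserved) * allocation_ratio, with max_unit capping any single allocation, the reported numbers work out as follows:

```python
# Capacity implied by the inventory reported above, assuming the
# Placement capacity formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable {capacity:g}, at most {inv['max_unit']} per allocation")
# VCPU: schedulable 192, at most 16 per allocation
# MEMORY_MB: schedulable 196078, at most 65530 per allocation
# DISK_GB: schedulable 400, at most 155 per allocation
```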
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.768631] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.774472] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529cf20e-6d10-f68e-975a-bdcf51e92c5e, 'name': SearchDatastore_Task, 'duration_secs': 0.012085} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.775498] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af87117d-ba94-409f-967c-ab3f4047977a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.780576] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 735.780576] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd4f80-8973-759f-1e67-a7284c37a1f6" [ 735.780576] env[68437]: _type = "Task" [ 735.780576] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.788635] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd4f80-8973-759f-1e67-a7284c37a1f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.879066] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 735.915883] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.916213] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.916380] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.916565] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.916766] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.916849] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.917074] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.917235] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.917399] env[68437]: DEBUG nova.virt.hardware [None 
req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.918076] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.918131] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.919415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b7562a-2c12-4ef7-b065-7fcb0eb51d17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.932782] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f01f715-dcf9-4a4f-abe2-637b02add24e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.090686] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.242s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.094445] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.011s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.095168] env[68437]: INFO nova.compute.claims [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.139271] env[68437]: INFO nova.scheduler.client.report [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Deleted allocations for instance 1537e626-f2ec-4b5d-bcba-50cd583dff31 [ 736.213797] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943829, 'name': ReconfigVM_Task, 'duration_secs': 0.718293} completed successfully. 
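The nova.virt.hardware entries above walk from the flavor/image limits (0:0:0, i.e. unconstrained, capped at 65536 each) to the single possible topology for one vCPU. The enumeration they describe can be reproduced roughly as follows; this is an illustrative re-derivation rather than Nova's _get_possible_cpu_topologies itself, and the 65536 defaults are taken from the "limits were sockets=65536, cores=65536, threads=65536" line.

    # Rough re-derivation of the topology search logged by nova.virt.hardware:
    # enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and which stay within the (here effectively unlimited) caps.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # For the m1.nano flavor above (vcpus=1) this yields exactly one topology,
    # matching "Got 1 possible topologies" and
    # "[VirtCPUTopology(cores=1,sockets=1,threads=1)]" in the log.
    print(possible_topologies(1))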
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.213797] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.213797] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2fc84d14-2678-4a5c-9bc1-7c0909376a65 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.223654] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 736.223654] env[68437]: value = "task-2943832" [ 736.223654] env[68437]: _type = "Task" [ 736.223654] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.234841] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943832, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.243075] env[68437]: DEBUG nova.network.neutron [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [{"id": "9fbfd56e-861b-488a-afc9-9efe25097c73", "address": "fa:16:3e:be:cf:7a", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbfd56e-86", "ovs_interfaceid": "9fbfd56e-861b-488a-afc9-9efe25097c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.248268] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 
0.127038} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.248726] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 736.250043] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132d0bda-adde-41c5-8b94-ccaeb456a04a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.279422] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/ad773afa-fc0a-4380-901d-af013ce55f2b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 736.280657] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edd570fb-9cd1-4d0c-9a72-b2104bfb3055 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.309771] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd4f80-8973-759f-1e67-a7284c37a1f6, 'name': SearchDatastore_Task, 'duration_secs': 0.017803} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.311607] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.311795] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] c74569b8-dfc9-4a74-9d25-74b484bd9477/c74569b8-dfc9-4a74-9d25-74b484bd9477.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.312352] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 736.312352] env[68437]: value = "task-2943833" [ 736.312352] env[68437]: _type = "Task" [ 736.312352] env[68437]: } to complete. 
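Read together, the entries for instance ad773afa-fc0a-4380-901d-af013ce55f2b, from the CopyVirtualDisk_Task above through the ReconfigVM_Task here and the Rename_Task and PowerOnVM_Task that follow, trace the sparse-image spawn path: copy the cached image vmdk into the instance directory, extend the root disk to the flavor size, attach it via a reconfigure, then rename and power on. A condensed, hypothetical sketch of that sequence is below; the helpers are print-only stubs, and only the ordering and the KB arithmetic are taken from the log.

    # Condensed sketch of the spawn sequence these entries trace. Each stub
    # stands in for a vCenter task that would itself be polled with the
    # wait_for_task pattern shown earlier.
    def _task(name, **kwargs):
        print(f"{name}: {kwargs}")

    def spawn_from_cached_image(instance_uuid, image_id, datastore="datastore1", root_gb=1):
        cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

        _task("CopyVirtualDisk_Task", source=cache_vmdk, dest=root_vmdk)
        # The extend size is in KB: root_gb=1 -> 1048576, matching
        # "Extending root virtual disk to 1048576" above.
        _task("ExtendVirtualDisk_Task", disk=root_vmdk, new_size_kb=root_gb * 1024 * 1024)
        _task("ReconfigVM_Task", attach=root_vmdk, disk_type="sparse")
        _task("Rename_Task", vm=instance_uuid)
        _task("PowerOnVM_Task", vm=instance_uuid)

    spawn_from_cached_image("ad773afa-fc0a-4380-901d-af013ce55f2b",
                            "a272f526-6b8d-4a29-bd06-cd29ab5fabbe")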
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.312546] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa0954cd-11b1-479e-9453-fb3295854bef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.080989] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Successfully updated port: d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 737.092590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.095120] env[68437]: DEBUG nova.compute.manager [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 737.096318] env[68437]: DEBUG nova.compute.manager [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 737.096500] env[68437]: DEBUG nova.compute.manager [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing instance network info cache due to event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 737.096711] env[68437]: DEBUG oslo_concurrency.lockutils [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.096850] env[68437]: DEBUG oslo_concurrency.lockutils [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.097511] env[68437]: DEBUG nova.network.neutron [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 737.102626] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eac35a95-1711-4c43-9b42-b73303528587 tempest-ImagesNegativeTestJSON-1118564223 tempest-ImagesNegativeTestJSON-1118564223-project-member] Lock "1537e626-f2ec-4b5d-bcba-50cd583dff31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.313s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.110084] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2e1d61-11c3-4a14-b292-d033548e7931 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.113131] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 737.113131] env[68437]: value = "task-2943834" [ 737.113131] env[68437]: _type = "Task" [ 737.113131] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.124954] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943833, 'name': ReconfigVM_Task, 'duration_secs': 0.509697} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.130448] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Reconfigured VM instance instance-00000017 to attach disk [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/ad773afa-fc0a-4380-901d-af013ce55f2b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.131214] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943832, 'name': Rename_Task, 'duration_secs': 0.247452} completed successfully. 
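The network-changed event handling above follows a consistent bracket: receive the external event, acquire the per-instance "refresh_cache-<uuid>" lock, rebuild the network info cache entry for the affected port, then release the lock. The sketch below mirrors that bracketing with a plain threading.Lock purely for illustration; the service in the log uses oslo_concurrency.lockutils, whose exact API is not reproduced here, and fetch_nw_info is a hypothetical stand-in for the Neutron query.

    import threading
    import time

    _cache_locks: dict[str, threading.Lock] = {}
    _cache_locks_guard = threading.Lock()

    def _lock_for(instance_uuid):
        with _cache_locks_guard:
            return _cache_locks.setdefault(instance_uuid, threading.Lock())

    def refresh_network_info_cache(instance_uuid, port_id, fetch_nw_info):
        lock = _lock_for(instance_uuid)
        print(f'Acquiring lock "refresh_cache-{instance_uuid}"')
        start = time.monotonic()
        with lock:
            print(f'Acquired lock after waiting {time.monotonic() - start:.3f}s')
            nw_info = fetch_nw_info(instance_uuid, port_id)
            print(f'Updated VIF entry for port {port_id}: {nw_info}')
        print(f'Releasing lock "refresh_cache-{instance_uuid}"')

    # Values taken from the entries above; the returned dict is a stand-in.
    refresh_network_info_cache(
        "07d98c5c-ede8-4001-93b2-1b1d83687ca1",
        "102fc7ce-ac0b-465b-8073-7ba895ea1293",
        lambda instance, port: {"port": port, "address": "fa:16:3e:65:55:29"},
    )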
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.137235] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b74ae3dd-1e31-42bf-9134-4d8ae0e6e293 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.138718] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.143310] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d19b6a82-a951-4696-bb04-106613464b2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.146059] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943834, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.162365] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 737.162365] env[68437]: value = "task-2943836" [ 737.162365] env[68437]: _type = "Task" [ 737.162365] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.162365] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 737.162365] env[68437]: value = "task-2943835" [ 737.162365] env[68437]: _type = "Task" [ 737.162365] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.175950] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943835, 'name': Rename_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.179168] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943836, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.604704] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.604900] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.605137] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 737.627837] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943834, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.645710] env[68437]: INFO nova.compute.manager [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] instance snapshotting [ 737.652663] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8c9ee4-0ef2-4f5c-895d-b4c3e0e2569c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.661381] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.661742] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.662032] env[68437]: DEBUG nova.compute.manager [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Going to confirm migration 1 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 737.695420] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e6061e37-3d40-41bc-a6a1-408891fa0a98 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.709056] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943836, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.709490] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943835, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.133466] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943834, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.672908} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.133749] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] c74569b8-dfc9-4a74-9d25-74b484bd9477/c74569b8-dfc9-4a74-9d25-74b484bd9477.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.134012] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.134823] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0170b79a-0a53-4797-bbb3-4e2e1439c6b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.145438] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 738.148888] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 738.148888] env[68437]: value = "task-2943837" [ 738.148888] env[68437]: _type = "Task" [ 738.148888] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.160852] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943837, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.199626] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943835, 'name': Rename_Task, 'duration_secs': 0.78853} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.199626] env[68437]: DEBUG oslo_vmware.api [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943836, 'name': PowerOnVM_Task, 'duration_secs': 0.774854} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.201353] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.203350] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.203350] env[68437]: DEBUG nova.compute.manager [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.203350] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ad9c1c3-dbe2-445c-9d6d-502382a80cba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.204356] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f353ed-2074-4d29-8e31-bd9c55c56325 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.219299] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 738.219672] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 738.219672] env[68437]: value = "task-2943838" [ 738.219672] env[68437]: 
_type = "Task" [ 738.219672] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.220143] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6399f369-bb58-4e96-8496-220bc3dce0b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.236373] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.241510] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 738.241510] env[68437]: value = "task-2943839" [ 738.241510] env[68437]: _type = "Task" [ 738.241510] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.252807] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943839, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.368360] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.368360] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.368360] env[68437]: DEBUG nova.network.neutron [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 738.368360] env[68437]: DEBUG nova.objects.instance [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lazy-loading 'info_cache' on Instance uuid 1186da93-57aa-40f4-8aae-702d039844d4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 738.374968] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1546251a-5b7f-41cd-b505-4e605939b8e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.383578] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00b5e1e-6fe8-4601-b90a-8a180769129c 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.420650] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939e7735-c95f-4aa0-bbbb-14375d73f886 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.423961] env[68437]: DEBUG nova.network.neutron [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updated VIF entry in instance network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 738.424328] env[68437]: DEBUG nova.network.neutron [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.431798] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fdd931-6cc0-4b7f-b651-17fea8d76579 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.448117] env[68437]: DEBUG nova.compute.provider_tree [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.492542] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Updating instance_info_cache with network_info: [{"id": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "address": "fa:16:3e:32:c6:6f", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a3099e-d4", "ovs_interfaceid": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.659928] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943837, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132603} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.660537] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.661431] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b111f0-3b8a-469d-bf9e-64dcc3bf8cbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.669018] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.669018] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d13d7d2-7999-47ac-af9f-d4589c3c637c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.688630] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] c74569b8-dfc9-4a74-9d25-74b484bd9477/c74569b8-dfc9-4a74-9d25-74b484bd9477.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.690939] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60403ebc-3b7e-412d-a731-db8b7411ed67 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.708011] env[68437]: DEBUG nova.compute.manager [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Received event network-vif-plugged-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.708514] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Acquiring lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.708878] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.709210] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.709510] env[68437]: DEBUG nova.compute.manager [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] No waiting events found dispatching network-vif-plugged-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 738.709805] env[68437]: WARNING nova.compute.manager [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Received unexpected event network-vif-plugged-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee for instance with vm_state building and task_state spawning. [ 738.710125] env[68437]: DEBUG nova.compute.manager [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Received event network-changed-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.710412] env[68437]: DEBUG nova.compute.manager [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Refreshing instance network info cache due to event network-changed-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 738.711047] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Acquiring lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.712798] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 738.712798] env[68437]: value = "task-2943840" [ 738.712798] env[68437]: _type = "Task" [ 738.712798] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.725239] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 738.725239] env[68437]: value = "task-2943841" [ 738.725239] env[68437]: _type = "Task" [ 738.725239] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.730356] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943840, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.740298] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.752832] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943838, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.753171] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.759218] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943839, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.932960] env[68437]: DEBUG oslo_concurrency.lockutils [req-4d9d1512-82bd-4c4e-9c78-c05fd9265985 req-6241925d-7257-4ff2-81ce-813f75a1e2ff service nova] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.952174] env[68437]: DEBUG nova.scheduler.client.report [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.998311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.998311] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Instance network_info: |[{"id": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "address": "fa:16:3e:32:c6:6f", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a3099e-d4", "ovs_interfaceid": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 738.998800] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Acquired lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.998800] env[68437]: DEBUG nova.network.neutron [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e 
req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Refreshing network info cache for port d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 738.998800] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:c6:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5a3099e-d4f3-4dfd-9371-2124a7d9b3ee', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.007978] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating folder: Project (f0e56fa6cd94413d82963b143143f519). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.014936] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-076447e8-9ef0-4617-a123-4709439ef036 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.034275] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created folder: Project (f0e56fa6cd94413d82963b143143f519) in parent group-v590848. [ 739.034536] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating folder: Instances. Parent ref: group-v590923. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.035196] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b461754f-b0e4-4dcf-8a44-9359f52ff6bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.054166] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created folder: Instances in parent group-v590923. [ 739.054166] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
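The inventory payload reported for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 a few entries back is how the resource tracker describes capacity to Placement. The effective schedulable capacity per resource class is conventionally (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the snippet below just applies that convention to the logged numbers as a sanity check (the formula is the usual Placement interpretation, not something the log itself states).

    # Apply the usual Placement capacity convention to the logged inventory:
    #   capacity = (total - reserved) * allocation_ratio, per-allocation cap = max_unit
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 155},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable {capacity:g}, single allocation capped at {inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400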
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.056379] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.056678] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f0857ad-7df4-4ce6-8c5b-4d0315af150a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.080922] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.080922] env[68437]: value = "task-2943844" [ 739.080922] env[68437]: _type = "Task" [ 739.080922] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.092546] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943844, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.231203] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943840, 'name': PowerOffVM_Task, 'duration_secs': 0.468221} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.241449] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.242386] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32bfadd-49ad-405a-b9c6-09e85c293d39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.258205] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943841, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.258205] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943838, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.283119] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf592acc-1526-44b4-a28d-1d458fa681ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.288663] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943839, 'name': CreateSnapshot_Task} progress is 100%. 
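The vm_util folder entries just above create a two-level layout before the VM itself is built: a "Project (<tenant-id>)" folder under the compute parent folder (group-v590848 here), an "Instances" folder inside it, and then CreateVM_Task inside that. A hypothetical print-only sketch of that layout, using the identifiers from the log; the path-style refs are illustrative and not real vCenter managed-object references.

    def create_folder(parent_ref, name):
        ref = f"{parent_ref}/{name}"
        print(f"Created folder: {name} in parent {parent_ref}.")
        return ref

    def create_vm_under_project(parent_ref, tenant_id, instance_uuid):
        project_ref = create_folder(parent_ref, f"Project ({tenant_id})")
        instances_ref = create_folder(project_ref, "Instances")
        print(f"CreateVM_Task: {instance_uuid} in {instances_ref}")

    create_vm_under_project("group-v590848",
                            "f0e56fa6cd94413d82963b143143f519",
                            "67312d87-cc63-4dc7-b9c1-9c8d349a4756")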
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.336476] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.336476] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d43cdb35-4a0e-4b65-9ed8-1476de967172 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.345503] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 739.345503] env[68437]: value = "task-2943845" [ 739.345503] env[68437]: _type = "Task" [ 739.345503] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.358406] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.419865] env[68437]: DEBUG nova.network.neutron [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Updated VIF entry in instance network info cache for port d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 739.420269] env[68437]: DEBUG nova.network.neutron [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Updating instance_info_cache with network_info: [{"id": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "address": "fa:16:3e:32:c6:6f", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5a3099e-d4", "ovs_interfaceid": "d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.467808] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.374s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.468290] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 739.471089] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.067s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.478050] env[68437]: INFO nova.compute.claims [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.596934] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943844, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.746530] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943841, 'name': ReconfigVM_Task, 'duration_secs': 0.718938} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.747400] env[68437]: DEBUG oslo_vmware.api [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943838, 'name': PowerOnVM_Task, 'duration_secs': 1.170364} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.747735] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Reconfigured VM instance instance-00000018 to attach disk [datastore1] c74569b8-dfc9-4a74-9d25-74b484bd9477/c74569b8-dfc9-4a74-9d25-74b484bd9477.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.751277] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.751610] env[68437]: INFO nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Took 11.88 seconds to spawn the instance on the hypervisor. [ 739.751918] env[68437]: DEBUG nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.752295] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9e8e743-04f7-46b8-8c0b-ff7101d70336 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.754660] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668be8da-f02d-4d3f-b555-6b3a375923d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.764793] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943839, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.768057] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 739.768057] env[68437]: value = "task-2943846" [ 739.768057] env[68437]: _type = "Task" [ 739.768057] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.776180] env[68437]: DEBUG nova.network.neutron [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [{"id": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "address": "fa:16:3e:fc:76:b9", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4fd952c0-79", "ovs_interfaceid": "4fd952c0-7921-4632-b5de-2fe90c4bba05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.784599] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943846, 'name': Rename_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.857465] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 739.857726] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.859121] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.859121] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.859242] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.859675] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb11b7ff-1d58-4691-9195-8b5021325d37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.872428] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.872685] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.876549] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1beb8d8-d0d7-45c9-b56e-ed640de35153 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.880879] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 739.880879] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523c61d1-59d5-dbb4-0182-43d14d54b0c9" [ 739.880879] env[68437]: _type = "Task" [ 739.880879] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.889841] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c61d1-59d5-dbb4-0182-43d14d54b0c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.924714] env[68437]: DEBUG oslo_concurrency.lockutils [req-390c8e5b-e5f6-41b3-8c91-b06f3a719e9e req-f10b2ed9-49ec-4363-80a0-62dba9726608 service nova] Releasing lock "refresh_cache-67312d87-cc63-4dc7-b9c1-9c8d349a4756" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.984837] env[68437]: DEBUG nova.compute.utils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 739.990762] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 739.990762] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 740.054977] env[68437]: DEBUG nova.policy [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59b6e538d77d441e852466b24b70e0a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e56fa6cd94413d82963b143143f519', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 740.085340] env[68437]: INFO nova.compute.manager [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Rebuilding instance [ 740.100985] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943844, 'name': CreateVM_Task, 'duration_secs': 0.53454} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.101193] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 740.102207] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.102353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.102858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 740.102941] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-953642ed-667e-4c3b-832e-0e6b3b55451e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.110867] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 
tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 740.110867] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a08b1e-18f1-6486-2113-03f08035c49f" [ 740.110867] env[68437]: _type = "Task" [ 740.110867] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.124306] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a08b1e-18f1-6486-2113-03f08035c49f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.166622] env[68437]: DEBUG nova.compute.manager [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 740.169734] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd89d7f9-7fd0-42d9-a786-55e12f5e5f69 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.266464] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943839, 'name': CreateSnapshot_Task, 'duration_secs': 1.598705} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.266932] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 740.267550] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97276641-84c5-4f7f-a75d-edf87e5114da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.282878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-1186da93-57aa-40f4-8aae-702d039844d4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.282878] env[68437]: DEBUG nova.objects.instance [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lazy-loading 'migration_context' on Instance uuid 1186da93-57aa-40f4-8aae-702d039844d4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 740.295270] env[68437]: INFO nova.compute.manager [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Took 38.13 seconds to build instance. 
[ 740.309459] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943846, 'name': Rename_Task, 'duration_secs': 0.348384} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.309459] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.309459] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b0eb8a2-2bfe-4c9a-8f05-f6fd79f61248 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.320055] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 740.320055] env[68437]: value = "task-2943848" [ 740.320055] env[68437]: _type = "Task" [ 740.320055] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.330996] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943848, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.391414] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c61d1-59d5-dbb4-0182-43d14d54b0c9, 'name': SearchDatastore_Task, 'duration_secs': 0.014186} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.392339] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0697d3-ce77-42b4-89ab-d617ad66e53b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.398706] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 740.398706] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52137866-ad4c-cba6-49ec-735728a85e81" [ 740.398706] env[68437]: _type = "Task" [ 740.398706] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.409711] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52137866-ad4c-cba6-49ec-735728a85e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.491720] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 740.546254] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Successfully created port: e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.626327] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a08b1e-18f1-6486-2113-03f08035c49f, 'name': SearchDatastore_Task, 'duration_secs': 0.014585} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.628362] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.628696] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.629273] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.629273] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.629273] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.631157] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c855918-496b-4fa5-8e48-2b3532aff880 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.640474] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 740.640698] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 740.641576] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d346a470-69bf-428b-a95f-7f0b3647a089 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.650854] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 740.650854] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52200886-70b0-e901-23b3-013786a9fa68" [ 740.650854] env[68437]: _type = "Task" [ 740.650854] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.662203] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52200886-70b0-e901-23b3-013786a9fa68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.797602] env[68437]: DEBUG nova.objects.base [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Object Instance<1186da93-57aa-40f4-8aae-702d039844d4> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 740.807121] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 740.807908] env[68437]: DEBUG oslo_concurrency.lockutils [None req-aec343b6-5a39-4026-8829-8a7ff253c506 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.518s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.808661] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1dadfa-ce50-4eb4-9baa-7311b9bda444 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.812503] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-80d7df42-772b-4da8-9749-040e57e57e52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.846983] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-333606a4-5265-44f4-b618-3238c831da21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.850495] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 740.850495] env[68437]: value = "task-2943849" [ 740.850495] env[68437]: _type = "Task" [ 740.850495] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.863594] env[68437]: DEBUG oslo_vmware.api [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 740.863594] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c53e0c-4a8a-8893-ca45-60616f205ae1" [ 740.863594] env[68437]: _type = "Task" [ 740.863594] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.863975] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943848, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.875797] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task} progress is 12%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.885443] env[68437]: DEBUG oslo_vmware.api [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c53e0c-4a8a-8893-ca45-60616f205ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.013207} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.885443] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.916304] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52137866-ad4c-cba6-49ec-735728a85e81, 'name': SearchDatastore_Task, 'duration_secs': 0.013255} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.916617] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.916873] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. {{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 740.917597] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a01145f1-c145-4932-90a1-9777e43581a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.927430] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 740.927430] env[68437]: value = "task-2943850" [ 740.927430] env[68437]: _type = "Task" [ 740.927430] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.937886] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.173205] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52200886-70b0-e901-23b3-013786a9fa68, 'name': SearchDatastore_Task, 'duration_secs': 0.012851} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.177447] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17210217-8ad4-44f6-b0ef-40a15142558d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.187137] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 741.187635] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 741.187635] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0aa30-48da-fd88-e53a-6f4a9dba53fa" [ 741.187635] env[68437]: _type = "Task" [ 741.187635] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.188245] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f518412-522e-4f59-9136-d369904c1992 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.201219] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f05323f-538e-49d7-a97f-79e575f22906 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.207024] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 741.207024] env[68437]: value = "task-2943851" [ 741.207024] env[68437]: _type = "Task" [ 741.207024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.213045] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0aa30-48da-fd88-e53a-6f4a9dba53fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.217927] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47edfe99-36a9-4f79-97f6-a21f511a0ae9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.230891] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943851, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.262701] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80aeeb3-5a49-4319-b958-02cfb9204736 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.275147] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fa8137-7366-4971-8c0c-64a95b52af22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.297794] env[68437]: DEBUG nova.compute.provider_tree [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.319367] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.335819] env[68437]: DEBUG oslo_vmware.api [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2943848, 'name': PowerOnVM_Task, 'duration_secs': 0.608004} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.336274] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.336535] env[68437]: INFO nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Took 10.65 seconds to spawn the instance on the hypervisor. 
[ 741.339659] env[68437]: DEBUG nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 741.339659] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe4b951-d61d-45c0-bb57-87d68365edaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.365679] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.443615] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943850, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.446435] env[68437]: INFO nova.compute.manager [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Rescuing [ 741.448027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.448027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.448027] env[68437]: DEBUG nova.network.neutron [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 741.509925] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 741.552500] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.553440] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.553440] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.553440] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.553440] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.553440] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.553781] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.553781] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.553906] env[68437]: DEBUG nova.virt.hardware [None 
req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.554122] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.554379] env[68437]: DEBUG nova.virt.hardware [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.555236] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2928095e-8111-4550-ad8a-a7722de9d6c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.567450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d3d8dc-6605-4297-ae40-28ef22ac0ce7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.703320] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0aa30-48da-fd88-e53a-6f4a9dba53fa, 'name': SearchDatastore_Task, 'duration_secs': 0.027209} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.703674] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.703987] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 67312d87-cc63-4dc7-b9c1-9c8d349a4756/67312d87-cc63-4dc7-b9c1-9c8d349a4756.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 741.704269] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e40ff7a-7aad-4353-a278-3cb55b015d34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.712361] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 741.712361] env[68437]: value = "task-2943852" [ 741.712361] env[68437]: _type = "Task" [ 741.712361] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.727036] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943852, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.727874] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943851, 'name': PowerOffVM_Task, 'duration_secs': 0.19725} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.728229] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 741.728331] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 741.729213] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a552340-62aa-4c35-9719-9a1e18744153 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.742909] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 741.745223] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02b95e49-c9c9-42de-aee8-e0d1b409bdbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.774303] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.774540] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.774726] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Deleting the datastore file [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.775026] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9655573b-e2cd-43ea-9347-9e4fea64e1ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.783203] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 741.783203] env[68437]: value = "task-2943854" [ 741.783203] env[68437]: _type = "Task" [ 741.783203] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.791683] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943854, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.801951] env[68437]: DEBUG nova.scheduler.client.report [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.844530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.863293] env[68437]: INFO nova.compute.manager [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Took 38.39 seconds to build instance. [ 741.868818] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.939723] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57703} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.940046] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. 
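[editor's note] The entries above and below repeat one idiom: Nova invokes a vCenter "*_Task" method through oslo.vmware (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, PowerOnVM_Task, ...), then wait_for_task/_poll_task polls the task's progress ("progress is N%") until it logs "completed successfully" or fails. The sketch below is a minimal, hypothetical rendering of that polling loop, not the actual oslo.vmware implementation; read_task_info and its field names are assumptions standing in for the real PropertyCollector lookup of the task's info property.

    # Illustrative sketch only -- mimics the wait_for_task/_poll_task pattern
    # seen in this log; helpers and field names are hypothetical.
    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""

    def wait_for_task(task_ref, read_task_info, poll_interval=0.5):
        """Poll a vSphere task reference until it completes.

        read_task_info(task_ref) is assumed to return an object with
        state ('queued'/'running'/'success'/'error'), progress and error
        fields, matching the "progress is N%" lines above.
        """
        while True:
            info = read_task_info(task_ref)
            if info.state == 'success':
                return info                  # "completed successfully"
            if info.state == 'error':
                raise TaskFailed(info.error) # surfaced to the compute manager
            # still queued/running: wait and poll again
            time.sleep(poll_interval)

The real session object also serializes access behind locks (the lockutils "acquired"/"released" lines), which this sketch deliberately omits.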
[ 741.940878] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da8be6f-580a-4769-912d-455a723d1563 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.975513] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.975513] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fb3a09a-ccea-4238-92bb-a746ef0fe8f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.999106] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 741.999106] env[68437]: value = "task-2943855" [ 741.999106] env[68437]: _type = "Task" [ 741.999106] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.016043] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943855, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.224505] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943852, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.301700] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943854, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208414} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.302135] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.302270] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.302542] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.307416] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.307950] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 742.310703] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.278s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.314279] env[68437]: INFO nova.compute.claims [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 742.366194] env[68437]: DEBUG oslo_concurrency.lockutils [None req-52b4acba-6a0b-44dc-922e-37e2ac9f6ee8 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.895s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.370786] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.448486] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Successfully updated port: e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 742.513818] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943855, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.614384] env[68437]: DEBUG nova.network.neutron [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updating instance_info_cache with network_info: [{"id": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "address": "fa:16:3e:8b:b5:cc", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d3a732-f8", "ovs_interfaceid": "b1d3a732-f87f-4b26-a261-f7dccc5912ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.728928] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731524} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.730231] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 67312d87-cc63-4dc7-b9c1-9c8d349a4756/67312d87-cc63-4dc7-b9c1-9c8d349a4756.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 742.730231] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 742.730398] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ce3c955-b0b5-4bcb-8d52-2c9083ddf561 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.738761] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 742.738761] env[68437]: value = "task-2943857" [ 742.738761] env[68437]: _type = "Task" [ 742.738761] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.743253] env[68437]: DEBUG nova.compute.manager [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Received event network-vif-plugged-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 742.743253] env[68437]: DEBUG oslo_concurrency.lockutils [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] Acquiring lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.743253] env[68437]: DEBUG oslo_concurrency.lockutils [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.743253] env[68437]: DEBUG oslo_concurrency.lockutils [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.743459] env[68437]: DEBUG nova.compute.manager [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] No waiting events found dispatching 
network-vif-plugged-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 742.743628] env[68437]: WARNING nova.compute.manager [req-b48c8261-f0cb-4d1d-ae01-ad1ab583af29 req-0c1a394c-a52c-4b99-bd53-c7f57d7f200b service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Received unexpected event network-vif-plugged-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 for instance with vm_state building and task_state spawning. [ 742.758455] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943857, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.818778] env[68437]: DEBUG nova.compute.utils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.824373] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.824373] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 742.872715] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 742.879021] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.912037] env[68437]: DEBUG nova.policy [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b7e2c2d7b1d748e5b96fd49fd1285112', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5035a40e44f549e7add64045ef3ab722', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.951062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.951273] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.951470] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 743.015025] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943855, 'name': ReconfigVM_Task, 'duration_secs': 0.801373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.015025] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 743.015025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a90ef4d-35e0-41e9-ab11-8cf9a813fa8e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.041762] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88bbfcb8-5e4c-4dbf-84d9-c9c0e2aa9676 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.066017] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 743.066017] env[68437]: value = "task-2943858" [ 743.066017] env[68437]: _type = "Task" [ 743.066017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.074132] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.116773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "refresh_cache-ad773afa-fc0a-4380-901d-af013ce55f2b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.254704] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078356} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.257254] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 743.257254] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f00131-6284-40d8-b19f-5d5a546ccb37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.284048] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 67312d87-cc63-4dc7-b9c1-9c8d349a4756/67312d87-cc63-4dc7-b9c1-9c8d349a4756.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 743.284363] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e55ce0c7-24b1-42cf-84f7-5854cc94ca15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.312029] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 743.312029] env[68437]: value = "task-2943859" [ 743.312029] env[68437]: _type = "Task" [ 743.312029] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.323267] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.327031] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943859, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.360331] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.360331] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.360642] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.360642] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.360740] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.360912] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.361252] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.361436] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.361611] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 
tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.361836] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.362061] env[68437]: DEBUG nova.virt.hardware [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.363620] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f637f4-6ad1-4bea-9b90-5f31442bb8c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.367998] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Successfully created port: 5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.399530] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91258ec-5cb5-4835-83fd-772f3673d79c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.406548] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943849, 'name': CloneVM_Task, 'duration_secs': 2.213118} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.407741] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.411894] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Created linked-clone VM from snapshot [ 743.416365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcbca5f-fb03-4dd8-874c-ce671a222ffd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.419942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.420198] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.432245] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.441169] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.442221] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.442790] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27001842-0949-485d-be0c-b35709f577ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.462509] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Uploading image 8e4f9c36-d94e-4e97-865b-edaeabb02190 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 743.473574] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.473574] env[68437]: value = "task-2943860" [ 743.473574] env[68437]: _type = "Task" [ 743.473574] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.474588] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 743.475219] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-76ef0d5b-313a-406f-810f-4883d144b9c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.489882] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943860, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.491319] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 743.491319] env[68437]: value = "task-2943861" [ 743.491319] env[68437]: _type = "Task" [ 743.491319] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.502944] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943861, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.534744] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 743.578130] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943858, 'name': ReconfigVM_Task, 'duration_secs': 0.339827} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.580572] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.581210] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32b949eb-1c6e-4d38-8795-5a910b8ca7c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.590064] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 743.590064] env[68437]: value = "task-2943862" [ 743.590064] env[68437]: _type = "Task" [ 743.590064] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.599293] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.824968] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943859, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.866884] env[68437]: DEBUG nova.network.neutron [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Updating instance_info_cache with network_info: [{"id": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "address": "fa:16:3e:76:80:b6", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3634eb4-0e", "ovs_interfaceid": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.974887] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86349353-81e0-463b-a8e0-d339bcb117e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.989610] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943860, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.990922] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d9fd7f-bd7e-4f28-a428-7496e4df540f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.006189] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943861, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.035675] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917d4b76-8867-4af9-a792-b2939bde6a03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.044379] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047dfcf5-58d8-43e4-bafe-e1112aa55ffc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.062833] env[68437]: DEBUG nova.compute.provider_tree [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.101200] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943862, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.328799] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943859, 'name': ReconfigVM_Task, 'duration_secs': 0.711182} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.329107] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 67312d87-cc63-4dc7-b9c1-9c8d349a4756/67312d87-cc63-4dc7-b9c1-9c8d349a4756.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 744.329994] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91047a99-fea5-42b7-a4b7-84f95748410a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.332698] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.341902] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 744.341902] env[68437]: value = "task-2943863" [ 744.341902] env[68437]: _type = "Task" [ 744.341902] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.358591] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943863, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.370356] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.370819] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.370989] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.371197] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.371342] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.371488] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.371703] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.371886] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.372135] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.372345] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.372541] env[68437]: DEBUG nova.virt.hardware [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.373544] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d55116f-4e57-459f-9a46-c2ab607a2c4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.378731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.379123] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Instance network_info: |[{"id": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "address": "fa:16:3e:76:80:b6", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3634eb4-0e", "ovs_interfaceid": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 744.382019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:80:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.392347] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 744.392755] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 744.394077] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82cd7e7-3274-43a8-a7e3-3cadfab30d6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.399679] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de79b04f-b475-43e0-a813-c743171fbd73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.431454] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 744.431454] env[68437]: value = "task-2943865" [ 744.431454] env[68437]: _type = "Task" [ 744.431454] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.443485] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.489603] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943860, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.509271] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943861, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.562511] env[68437]: DEBUG nova.scheduler.client.report [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.604352] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943862, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.660274] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 744.660274] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4625d61-53ca-457c-a71f-cfce3e266a66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.673026] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 744.673026] env[68437]: value = "task-2943866" [ 744.673026] env[68437]: _type = "Task" [ 744.673026] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.684582] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.854906] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943863, 'name': Rename_Task, 'duration_secs': 0.21454} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.854906] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 744.854906] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dedbcfec-3b4d-4356-a729-7c5d77371c46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.863091] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 744.863091] env[68437]: value = "task-2943867" [ 744.863091] env[68437]: _type = "Task" [ 744.863091] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.869980] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943867, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.942573] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.989274] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943860, 'name': CreateVM_Task, 'duration_secs': 1.155373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.989274] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.990188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.990579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.991714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 744.994056] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82afd606-3e3a-4244-b636-e5f25476c5e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.999017] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 744.999017] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52765bba-5496-af98-13f7-d8864a5b38da" [ 744.999017] env[68437]: _type = "Task" [ 744.999017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.015022] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52765bba-5496-af98-13f7-d8864a5b38da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.015022] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943861, 'name': Destroy_Task, 'duration_secs': 1.314015} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.015022] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Destroyed the VM [ 745.015678] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 745.016082] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-07317fc5-84de-4a41-938d-53a561efdb3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.026108] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 745.026108] env[68437]: value = "task-2943868" [ 745.026108] env[68437]: _type = "Task" [ 745.026108] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.034599] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943868, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.070018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.757s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.070018] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.071720] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.762s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.072225] env[68437]: DEBUG nova.objects.instance [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 745.112492] env[68437]: DEBUG oslo_vmware.api [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943862, 'name': PowerOnVM_Task, 'duration_secs': 1.196996} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.112492] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.116018] env[68437]: DEBUG nova.compute.manager [None req-ccf7c5ed-05b9-4e81-ae05-9299a6efc273 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.116018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44505e6-90e6-487b-8f5a-03015b35c21e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.189195] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943866, 'name': PowerOffVM_Task, 'duration_secs': 0.291521} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.189195] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.189195] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba53a0cc-25cb-4ec9-ae64-02977ef392dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.213606] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3087b370-720d-49c1-bf80-3a8854a168fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.244546] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.244883] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa3ef7e2-c365-4822-b683-c4d9f7cf53cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.253556] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 745.253556] env[68437]: value = "task-2943869" [ 745.253556] env[68437]: _type = "Task" [ 745.253556] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.266806] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 745.267176] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.267519] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.267730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.267973] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.268285] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40df8265-6c46-4610-911e-421522778ffa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.279718] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.279718] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 745.280047] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf13bcc-eaf0-4198-85df-acb8be4f5536 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.286895] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 745.286895] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529e5ad9-6b49-ee82-e87e-ea366119c81e" [ 745.286895] env[68437]: _type = "Task" [ 745.286895] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.297321] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529e5ad9-6b49-ee82-e87e-ea366119c81e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.324958] env[68437]: DEBUG nova.compute.manager [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Received event network-changed-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 745.324958] env[68437]: DEBUG nova.compute.manager [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Refreshing instance network info cache due to event network-changed-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 745.325121] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Acquiring lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.325446] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Acquired lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.325639] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Refreshing network info cache for port e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 745.373452] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943867, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.396993] env[68437]: DEBUG nova.compute.manager [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Received event network-vif-plugged-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 745.397239] env[68437]: DEBUG oslo_concurrency.lockutils [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] Acquiring lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.397450] env[68437]: DEBUG oslo_concurrency.lockutils [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.397614] env[68437]: DEBUG oslo_concurrency.lockutils [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.397781] env[68437]: DEBUG nova.compute.manager [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] No waiting events found dispatching network-vif-plugged-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.397943] env[68437]: WARNING nova.compute.manager [req-51c66baf-ca25-4acf-b978-5b8548cfb088 req-e50ea472-24ea-407b-b8cb-a5ff0781b151 service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Received unexpected event network-vif-plugged-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 for instance with vm_state building and task_state spawning. [ 745.444020] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.499920] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Successfully updated port: 5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.514011] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52765bba-5496-af98-13f7-d8864a5b38da, 'name': SearchDatastore_Task, 'duration_secs': 0.029825} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.514011] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.514312] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.514541] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.537333] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943868, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.579064] env[68437]: DEBUG nova.compute.utils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.583322] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.583322] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 745.692955] env[68437]: DEBUG nova.policy [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '163bf8776e974009bd2e1256a0a5a089', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '580da549128340b9ab717ba1ada787b7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.801561] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529e5ad9-6b49-ee82-e87e-ea366119c81e, 'name': SearchDatastore_Task, 'duration_secs': 0.010585} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.802519] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6b55dff-62e1-48e6-8b11-9889ca0e5292 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.808871] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 745.808871] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e33c8e-6384-cbfa-1066-dee364111d83" [ 745.808871] env[68437]: _type = "Task" [ 745.808871] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.817812] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e33c8e-6384-cbfa-1066-dee364111d83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.872021] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943867, 'name': PowerOnVM_Task, 'duration_secs': 0.682897} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.876917] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 745.879547] env[68437]: INFO nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Took 10.00 seconds to spawn the instance on the hypervisor. [ 745.879781] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.880650] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769f33c7-fdbc-48e9-8cdf-ed8e1af95f9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.970897] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.009132] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.009132] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.009132] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 746.036603] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943868, 'name': RemoveSnapshot_Task} progress is 76%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.083177] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.086855] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Updated VIF entry in instance network info cache for port e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 746.087191] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Updating instance_info_cache with network_info: [{"id": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "address": "fa:16:3e:76:80:b6", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3634eb4-0e", "ovs_interfaceid": "e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.089859] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4d07ae76-94a3-4a00-8226-a127e2d8b4b8 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.091287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.472s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.092714] env[68437]: INFO nova.compute.claims [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.283384] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Successfully created port: 9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 746.320784] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e33c8e-6384-cbfa-1066-dee364111d83, 'name': SearchDatastore_Task, 'duration_secs': 0.011548} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.321377] env[68437]: DEBUG oslo_concurrency.lockutils [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.321813] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. {{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 746.322334] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.322693] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.323048] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96b1ac44-32fb-4900-8bd5-08682bcf294f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.326313] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-505e31ea-1404-4728-9e71-edc1e57fe06d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.335542] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 746.335542] env[68437]: value = "task-2943870" [ 746.335542] env[68437]: _type = "Task" [ 746.335542] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.339984] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.340302] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.341456] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2c73eb-9b1e-4b23-8021-3a3f762610ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.347466] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943870, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.353790] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 746.353790] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525f4e8b-fcaa-bf2a-8d24-70d92b48ab7d" [ 746.353790] env[68437]: _type = "Task" [ 746.353790] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.361525] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525f4e8b-fcaa-bf2a-8d24-70d92b48ab7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.404610] env[68437]: INFO nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Took 38.85 seconds to build instance. 
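The repeated `Waiting for the task: (returnval){ ... }` blocks and the `progress is N%.` lines above come from oslo.vmware's task-polling loop (`wait_for_task` / `_poll_task`). Below is a minimal sketch of that pattern, not the actual Nova code: the vCenter host, credentials, and the choice of `PowerOnVM_Task` on the first visible VM are placeholders/assumptions, not values taken from this log.

```python
# Minimal sketch of oslo.vmware task polling; host/credentials are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',           # vCenter host (placeholder)
    'user@vsphere.local',        # username (placeholder)
    'secret',                    # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)      # interval behind the "_poll_task ... progress is N%" lines

# PropertyCollector.RetrievePropertiesEx under the hood, like the lines above.
result = session.invoke_api(vim_util, 'get_objects', session.vim, 'VirtualMachine', 100)
vm_ref = result.objects[0].obj   # assumes at least one VM is visible to this session

# Any *_Task SOAP call returns a task moref; wait_for_task() polls it until it
# reaches the success state (or raises on error) and returns the final task info.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.state)
```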
[ 746.455638] env[68437]: INFO nova.compute.manager [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Unrescuing [ 746.455638] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.455638] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.455638] env[68437]: DEBUG nova.network.neutron [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 746.455638] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.540167] env[68437]: DEBUG oslo_vmware.api [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943868, 'name': RemoveSnapshot_Task, 'duration_secs': 1.150644} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.540167] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 746.549200] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 746.598414] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Releasing lock "refresh_cache-1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.598681] env[68437]: DEBUG nova.compute.manager [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Received event network-changed-cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 746.598996] env[68437]: DEBUG nova.compute.manager [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Refreshing instance network info cache due to event network-changed-cc91b233-efdf-4cb6-9817-3f48a59237be. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 746.599068] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Acquiring lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.599249] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Acquired lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.599364] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Refreshing network info cache for port cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 746.764332] env[68437]: DEBUG nova.network.neutron [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Updating instance_info_cache with network_info: [{"id": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "address": "fa:16:3e:62:79:02", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c68a2d0-4a", "ovs_interfaceid": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.847627] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943870, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.867621] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525f4e8b-fcaa-bf2a-8d24-70d92b48ab7d, 'name': SearchDatastore_Task, 'duration_secs': 0.012061} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.868706] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef3e7a61-246f-4e26-b373-060064e9f4ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.876631] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 746.876631] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523aa2ea-5d61-9bb9-8d83-00964fbf5fb4" [ 746.876631] env[68437]: _type = "Task" [ 746.876631] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.890267] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523aa2ea-5d61-9bb9-8d83-00964fbf5fb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.907645] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.097s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.952856] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.044103] env[68437]: WARNING nova.compute.manager [None req-f2d9116e-0441-4c6a-bb98-27c22d704696 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Image not found during snapshot: nova.exception.ImageNotFound: Image 8e4f9c36-d94e-4e97-865b-edaeabb02190 could not be found. 
[ 747.096079] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.130646] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.130917] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.131093] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.131289] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.131491] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.131571] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.131782] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 747.131958] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.132157] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.132334] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.132507] env[68437]: DEBUG nova.virt.hardware [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.134182] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b476b178-c0e6-450a-9ce0-12c160662b40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.150524] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb374ad-1133-4aa5-919b-66bafa816c47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.268315] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.268650] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Instance network_info: |[{"id": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "address": "fa:16:3e:62:79:02", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap5c68a2d0-4a", "ovs_interfaceid": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.269528] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:79:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.278953] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.279397] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.279700] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fda764e-c584-4c51-b28c-5be87c9a9bb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.307862] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.307862] env[68437]: value = "task-2943871" [ 747.307862] env[68437]: _type = "Task" [ 747.307862] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.321660] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943871, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.348818] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683067} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.350032] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. 
[ 747.356150] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf2f792-96a2-4c33-8797-faa8bd66f232 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.386933] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.390283] env[68437]: DEBUG nova.network.neutron [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [{"id": "9fbfd56e-861b-488a-afc9-9efe25097c73", "address": "fa:16:3e:be:cf:7a", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9fbfd56e-86", "ovs_interfaceid": "9fbfd56e-861b-488a-afc9-9efe25097c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.398490] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0cbfacf-253c-4e76-a4b8-9d27afc07e11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.419152] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 747.430443] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523aa2ea-5d61-9bb9-8d83-00964fbf5fb4, 'name': SearchDatastore_Task, 'duration_secs': 0.063487} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.435062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.435432] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.436148] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 747.436148] env[68437]: value = "task-2943872" [ 747.436148] env[68437]: _type = "Task" [ 747.436148] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.436821] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e09170cc-d365-4699-993f-febd529733fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.456941] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.457975] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 747.457975] env[68437]: value = "task-2943873" [ 747.457975] env[68437]: _type = "Task" [ 747.457975] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.461886] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943865, 'name': CreateVM_Task, 'duration_secs': 2.586927} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.468988] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.471529] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.471782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.472911] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.473337] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471a06fa-7bef-4237-9ff2-c1d38d90a05d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.480879] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "d7c64aa1-44f8-44f4-9fb6-463033837736" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.480999] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.482029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.482029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.482029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.483468] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943873, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.485332] env[68437]: INFO nova.compute.manager [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Terminating instance [ 747.491630] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 747.491630] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5267f805-b445-b369-74cc-db02b06947a0" [ 747.491630] env[68437]: _type = "Task" [ 747.491630] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.505108] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5267f805-b445-b369-74cc-db02b06947a0, 'name': SearchDatastore_Task, 'duration_secs': 0.011918} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.505513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.505817] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.506116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.506299] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.506517] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.506814] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e00209eb-cfd6-4c14-908e-895a115bcb9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.517638] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.517826] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 747.518620] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5879581d-7f06-4391-a5e2-a63e53667792 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.532171] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 747.532171] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52573575-bc1e-e334-9e2f-6463e5eb0507" [ 747.532171] env[68437]: _type = "Task" [ 747.532171] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.542846] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52573575-bc1e-e334-9e2f-6463e5eb0507, 'name': SearchDatastore_Task, 'duration_secs': 0.010608} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.543668] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7221d88e-21d3-428c-859c-961352d9c714 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.553651] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 747.553651] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c7dd2c-f357-d63b-ceb4-f102ba17f8a0" [ 747.553651] env[68437]: _type = "Task" [ 747.553651] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.562094] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c7dd2c-f357-d63b-ceb4-f102ba17f8a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.563121] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updated VIF entry in instance network info cache for port cc91b233-efdf-4cb6-9817-3f48a59237be. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 747.563474] env[68437]: DEBUG nova.network.neutron [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updating instance_info_cache with network_info: [{"id": "cc91b233-efdf-4cb6-9817-3f48a59237be", "address": "fa:16:3e:35:da:52", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc91b233-ef", "ovs_interfaceid": "cc91b233-efdf-4cb6-9817-3f48a59237be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.711671] env[68437]: DEBUG nova.compute.manager [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Received event network-changed-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 747.711901] env[68437]: DEBUG nova.compute.manager [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Refreshing instance network info cache due to event network-changed-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 747.712579] env[68437]: DEBUG oslo_concurrency.lockutils [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] Acquiring lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.712579] env[68437]: DEBUG oslo_concurrency.lockutils [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] Acquired lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.712656] env[68437]: DEBUG nova.network.neutron [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Refreshing network info cache for port 5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 747.823840] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943871, 'name': CreateVM_Task, 'duration_secs': 0.43165} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.823840] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.824213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.824424] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.824791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.825380] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a05ebc7-6077-46c2-bd64-1a2691b20e38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.833438] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 747.833438] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528fce97-401d-4c1c-619a-0790c525bd5c" [ 747.833438] env[68437]: _type = "Task" [ 747.833438] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.846040] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528fce97-401d-4c1c-619a-0790c525bd5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.847929] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fed795-dc37-424e-b368-cae4ab5e8bd2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.859100] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a1c90a-fcd2-4de3-b05a-631fcb6d41ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.893011] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-19dde8dd-eae6-41a0-b147-c505db1cda15" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.893735] env[68437]: DEBUG nova.objects.instance [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'flavor' on Instance uuid 19dde8dd-eae6-41a0-b147-c505db1cda15 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.896734] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e90b71a-eafe-466f-8333-5600ad14d779 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.907279] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776c585d-1023-4228-9080-dc004c10db5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.931633] env[68437]: DEBUG nova.compute.provider_tree [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.950835] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943872, 'name': ReconfigVM_Task, 'duration_secs': 0.377887} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.952121] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.952685] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Reconfigured VM instance instance-00000017 to attach disk [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.953594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b0a4f2-4fab-40d4-9557-c0757e4c505b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.985848] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7b2988f-2c57-4f50-8052-575b05b416dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.997575] env[68437]: DEBUG nova.compute.manager [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 747.997801] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.999340] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7794465a-f79d-4d8e-93cd-a598fcf29ef3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.005318] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943873, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.007021] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 748.007021] env[68437]: value = "task-2943874" [ 748.007021] env[68437]: _type = "Task" [ 748.007021] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.011834] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.012384] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-649fd78a-91ab-407c-8a63-1f40f327514b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.018475] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.019735] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 748.019735] env[68437]: value = "task-2943875" [ 748.019735] env[68437]: _type = "Task" [ 748.019735] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.028412] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943875, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.064160] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c7dd2c-f357-d63b-ceb4-f102ba17f8a0, 'name': SearchDatastore_Task, 'duration_secs': 0.010901} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.064455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.064714] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530/1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.064977] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6462933-cfa9-42b5-bed6-eff471bce824 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.068732] env[68437]: DEBUG oslo_concurrency.lockutils [req-548f1ee9-23be-4c1a-9149-316f25ef9618 req-90f51c12-5976-4d03-8bb3-24d0fd3c6aa9 service nova] Releasing lock "refresh_cache-c74569b8-dfc9-4a74-9d25-74b484bd9477" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.072407] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 748.072407] env[68437]: value = "task-2943876" [ 748.072407] env[68437]: _type = "Task" [ 748.072407] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.081061] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943876, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.253714] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Successfully updated port: 9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.344642] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528fce97-401d-4c1c-619a-0790c525bd5c, 'name': SearchDatastore_Task, 'duration_secs': 0.059359} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.345099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.345365] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.345693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.345845] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.346898] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.347017] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c52945ae-3a83-48d1-b1f6-40d09953a841 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.359253] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.359253] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.362902] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cabdbc4-34d2-4de0-b863-201f8b7da0e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.369654] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 748.369654] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520e991b-93fc-2175-1e90-0d8c0a30fb77" [ 748.369654] env[68437]: _type = "Task" [ 748.369654] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.383237] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "29e9555b-f928-43e7-a3a3-869ed07d7326" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.383237] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.383467] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e991b-93fc-2175-1e90-0d8c0a30fb77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.400051] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b695cae7-7549-4ce5-bee6-39af6e14285b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.427258] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.427674] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4b48445-7248-4542-b2a5-33104a225eb5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.435864] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 748.435864] env[68437]: value = "task-2943877" [ 748.435864] env[68437]: _type = "Task" [ 748.435864] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.441842] env[68437]: DEBUG nova.scheduler.client.report [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.453810] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943877, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.492981] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57852} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.494526] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.494526] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.494526] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72290f45-be2a-4c36-9aaf-8f1a98b44324 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.503887] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 748.503887] env[68437]: value = "task-2943878" [ 748.503887] env[68437]: _type = "Task" [ 748.503887] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.514830] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.521560] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943874, 'name': ReconfigVM_Task, 'duration_secs': 0.373214} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.525201] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.525906] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60d1f9e1-e7e7-43af-ac89-b3b3ae5608e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.537347] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943875, 'name': PowerOffVM_Task, 'duration_secs': 0.334697} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.539722] env[68437]: DEBUG nova.network.neutron [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Updated VIF entry in instance network info cache for port 5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 748.540923] env[68437]: DEBUG nova.network.neutron [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Updating instance_info_cache with network_info: [{"id": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "address": "fa:16:3e:62:79:02", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c68a2d0-4a", "ovs_interfaceid": "5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.541358] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.541532] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.541859] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 748.541859] env[68437]: value = "task-2943879" [ 748.541859] env[68437]: _type = "Task" [ 748.541859] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.542286] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-207b5179-92ff-4886-b65a-2fc826b98285 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.556458] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943879, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.587040] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943876, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.644302] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.644649] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.645036] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleting the datastore file [datastore2] d7c64aa1-44f8-44f4-9fb6-463033837736 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.645223] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cc53baf-2550-4d79-b724-8971b9fb7d6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.654666] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 748.654666] env[68437]: value = "task-2943881" [ 748.654666] env[68437]: _type = "Task" [ 748.654666] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.665373] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943881, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.758241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.758461] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquired lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.758573] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 748.881347] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e991b-93fc-2175-1e90-0d8c0a30fb77, 'name': SearchDatastore_Task, 'duration_secs': 0.016358} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.882240] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269fa8e6-27d9-4cba-865f-75a1da356bec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.889390] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 748.889390] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525f64dd-09b0-58b2-f57e-09dc8edf2816" [ 748.889390] env[68437]: _type = "Task" [ 748.889390] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.907570] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525f64dd-09b0-58b2-f57e-09dc8edf2816, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.948086] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943877, 'name': PowerOffVM_Task, 'duration_secs': 0.458987} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.949064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.858s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.949694] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 748.953538] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.960812] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfiguring VM instance instance-0000000b to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 748.961488] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.979s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.963050] env[68437]: INFO nova.compute.claims [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.965956] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb5d2aae-b229-4a35-a920-d8bbdf61d5e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.987571] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 748.987571] env[68437]: value = "task-2943882" [ 748.987571] env[68437]: _type = "Task" [ 748.987571] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.000222] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943882, 'name': ReconfigVM_Task} progress is 5%. 
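
The "compute_resources" acquire/release lines above (waited 22.979s, held 2.858s) are oslo.concurrency's lock reporting around the resource tracker's claim. A small usage sketch follows, assuming oslo.concurrency is installed and using the standard lockutils.synchronized decorator; the claim body is a placeholder, not Nova's actual ResourceTracker code.

    from oslo_concurrency import lockutils

    # Serialize anything that mutates the per-host resource view, the same way
    # ResourceTracker.instance_claim does behind the "compute_resources" lock above.
    @lockutils.synchronized("compute_resources")
    def instance_claim(instance, resources):
        """Placeholder claim: reserve memory for one instance while holding the lock."""
        resources["memory_mb_used"] += instance["memory_mb"]
        return resources

    if __name__ == "__main__":
        host_view = {"memory_mb_used": 0}
        print(instance_claim({"memory_mb": 192}, host_view))  # {'memory_mb_used': 192}
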
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.005894] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.005998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.020688] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100601} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.020969] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.021768] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017bb558-cb8e-4614-ae8d-73dddde865d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.048510] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.049180] env[68437]: DEBUG oslo_concurrency.lockutils [req-c752405c-a051-4a90-8693-a1147b4f30f5 req-15bb4ce0-6835-4055-a345-865388f7876d service nova] Releasing lock "refresh_cache-aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.049981] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29f7c333-246d-49b0-8ce7-c62e4108f2f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.084588] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 749.084588] env[68437]: value = "task-2943883" [ 749.084588] env[68437]: _type = "Task" [ 749.084588] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.084588] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943879, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.091297] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684719} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.092060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530/1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.092315] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.092590] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9abedfc7-319a-4e9c-bdc7-6e1690f0fbf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.098300] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943883, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.104231] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 749.104231] env[68437]: value = "task-2943884" [ 749.104231] env[68437]: _type = "Task" [ 749.104231] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.115873] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943884, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.166735] env[68437]: DEBUG oslo_vmware.api [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2943881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299347} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.166999] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.167252] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 749.167669] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 749.167860] env[68437]: INFO nova.compute.manager [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Took 1.17 seconds to destroy the instance on the hypervisor. [ 749.168126] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.168673] env[68437]: DEBUG nova.compute.manager [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 749.168771] env[68437]: DEBUG nova.network.neutron [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 749.339632] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Instance cache missing network info. 
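
The destroy path for instance d7c64aa1-44f8-44f4-9fb6-463033837736 above runs in a fixed order: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. The condensed sketch below only captures that ordering; every helper is a hypothetical stand-in for the vm_util/ds_util/neutron calls shown in the log.

    def destroy_instance(session, vm_ref, datastore_path, network_api, instance):
        """Tear-down order observed in the log; each helper is a hypothetical stub."""
        power_off_vm(session, vm_ref)                     # PowerOffVM_Task
        unregister_vm(session, vm_ref)                    # VirtualMachine.UnregisterVM
        delete_datastore_file(session, datastore_path)    # FileManager.DeleteDatastoreFile_Task
        network_api.deallocate_for_instance(instance)     # drop the ports / info cache

    def power_off_vm(session, vm_ref): ...
    def unregister_vm(session, vm_ref): ...
    def delete_datastore_file(session, path): ...
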
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 749.400254] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525f64dd-09b0-58b2-f57e-09dc8edf2816, 'name': SearchDatastore_Task, 'duration_secs': 0.029253} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.400550] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.400843] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] aa39767e-1ae7-4881-b0a8-e7b66e1ceed2/aa39767e-1ae7-4881-b0a8-e7b66e1ceed2.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.401126] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63dfcbee-03ea-412e-b9d1-9f898fb52fde {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.409426] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 749.409426] env[68437]: value = "task-2943885" [ 749.409426] env[68437]: _type = "Task" [ 749.409426] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.418386] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943885, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.463229] env[68437]: DEBUG nova.compute.utils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 749.464886] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 749.465071] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 749.500875] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943882, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.519393] env[68437]: DEBUG nova.policy [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cf9b0dcb1774cc486f4168c1cea40d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8fb35616c64449f9a7b85f2d7e7d3c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 749.532451] env[68437]: DEBUG nova.compute.manager [req-f4364aa7-09d5-4728-a7ec-b374fea33eaa req-e91830b4-f8d7-4d65-bc4b-8fbe80ccefb3 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Received event network-vif-deleted-29fb0e88-6864-4d4b-b480-2f08c9984421 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.532662] env[68437]: INFO nova.compute.manager [req-f4364aa7-09d5-4728-a7ec-b374fea33eaa req-e91830b4-f8d7-4d65-bc4b-8fbe80ccefb3 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Neutron deleted interface 29fb0e88-6864-4d4b-b480-2f08c9984421; detaching it from the instance and deleting it from the info cache [ 749.532816] env[68437]: DEBUG nova.network.neutron [req-f4364aa7-09d5-4728-a7ec-b374fea33eaa req-e91830b4-f8d7-4d65-bc4b-8fbe80ccefb3 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.556546] env[68437]: DEBUG nova.network.neutron [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updating instance_info_cache with network_info: [{"id": "9457e907-17df-45cc-b8da-a57bf9901e34", "address": "fa:16:3e:b0:74:43", "network": {"id": "260635a4-08b1-4ef0-8347-6f14d256fce0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2002916601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "580da549128340b9ab717ba1ada787b7", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9457e907-17", "ovs_interfaceid": "9457e907-17df-45cc-b8da-a57bf9901e34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.564222] env[68437]: DEBUG oslo_vmware.api [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2943879, 'name': PowerOnVM_Task, 'duration_secs': 0.696528} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.564501] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.567131] env[68437]: DEBUG nova.compute.manager [None req-75c79f02-a83d-4a95-87d5-13771d25b2e6 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.567870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad590e9-d51d-4f76-b131-fcb3ec730713 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.597905] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.616187] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943884, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171348} completed successfully. 
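
The instance_info_cache payloads logged above are lists of VIF dicts. The sketch below pulls the MAC and fixed IPs out of one such entry, using a trimmed copy of the b92efa60-ef18-4578-b00d-6a2438e7eacf VIF from the cache update; the field names follow the logged structure.

    # Trimmed from the logged instance_info_cache entry for port 9457e907-17df-45cc-b8da-a57bf9901e34
    vif = {
        "id": "9457e907-17df-45cc-b8da-a57bf9901e34",
        "address": "fa:16:3e:b0:74:43",
        "type": "ovs",
        "devname": "tap9457e907-17",
        "network": {
            "id": "260635a4-08b1-4ef0-8347-6f14d256fce0",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4}],
                "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
            }],
            "meta": {"mtu": 8950, "physical_network": "default"},
        },
    }

    def fixed_ips(vif):
        """Collect every fixed IP across the VIF's subnets."""
        return [ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip["type"] == "fixed"]

    print(vif["address"], fixed_ips(vif))  # fa:16:3e:b0:74:43 ['192.168.128.13']
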
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.616187] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.616590] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c4292-22c6-49cd-9c99-b9f26ab3901f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.643437] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530/1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.644504] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f1103ac-608b-4388-9605-96c4d168beb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.667554] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 749.667554] env[68437]: value = "task-2943886" [ 749.667554] env[68437]: _type = "Task" [ 749.667554] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.677348] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943886, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.744916] env[68437]: DEBUG nova.compute.manager [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Received event network-vif-plugged-9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.745522] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Acquiring lock "b92efa60-ef18-4578-b00d-6a2438e7eacf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.745794] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.746020] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.746299] env[68437]: DEBUG nova.compute.manager [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] No waiting events found dispatching network-vif-plugged-9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.746581] env[68437]: WARNING nova.compute.manager [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Received unexpected event network-vif-plugged-9457e907-17df-45cc-b8da-a57bf9901e34 for instance with vm_state building and task_state spawning. [ 749.746885] env[68437]: DEBUG nova.compute.manager [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Received event network-changed-9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.747053] env[68437]: DEBUG nova.compute.manager [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Refreshing instance network info cache due to event network-changed-9457e907-17df-45cc-b8da-a57bf9901e34. 
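
The network-vif-plugged / network-changed lines above are Neutron-originated external events: the compute manager matches each one against events a waiter registered for that instance, keyed by event name plus port id, and logs the "Received unexpected event" warning when nobody is waiting. The dict-keyed sketch below illustrates that matching idea only; it is not the actual InstanceEvents code.

    waiters = {}  # (instance_uuid, "event-name-tag") -> callback registered by a waiter

    def pop_instance_event(instance_uuid, event_key, payload):
        """Hand the event to a registered waiter, or report it as unexpected."""
        callback = waiters.pop((instance_uuid, event_key), None)
        if callback is None:
            print("WARNING: unexpected event %s for %s" % (event_key, instance_uuid))
            return False
        callback(payload)
        return True

    # Nothing registered for this port yet, so the plug event is "unexpected",
    # mirroring the warning in the log above.
    pop_instance_event("b92efa60-ef18-4578-b00d-6a2438e7eacf",
                       "network-vif-plugged-9457e907-17df-45cc-b8da-a57bf9901e34",
                       payload={})
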
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 749.747229] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Acquiring lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.803995] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Successfully created port: ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 749.922582] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943885, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.945325] env[68437]: DEBUG nova.network.neutron [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.969821] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.001915] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943882, 'name': ReconfigVM_Task, 'duration_secs': 0.721102} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.001915] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Reconfigured VM instance instance-0000000b to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 750.001915] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 750.001915] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1df06016-ffad-41ee-9adc-f5a36c10adb5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.014678] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 750.014678] env[68437]: value = "task-2943887" [ 750.014678] env[68437]: _type = "Task" [ 750.014678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.034381] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943887, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.035607] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d3047c9-d765-45d3-bc3c-7cb0c139f316 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.049425] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d8bdf9-bc80-40be-86e3-9fc03470f4ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.066695] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Releasing lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.067040] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Instance network_info: |[{"id": "9457e907-17df-45cc-b8da-a57bf9901e34", "address": "fa:16:3e:b0:74:43", "network": {"id": "260635a4-08b1-4ef0-8347-6f14d256fce0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2002916601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "580da549128340b9ab717ba1ada787b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9457e907-17", "ovs_interfaceid": "9457e907-17df-45cc-b8da-a57bf9901e34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.067586] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Acquired lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.067779] env[68437]: DEBUG nova.network.neutron [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Refreshing network info cache for port 9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 750.069078] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:74:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9457e907-17df-45cc-b8da-a57bf9901e34', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.076863] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Creating folder: Project (580da549128340b9ab717ba1ada787b7). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.078133] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8344497d-5d02-4ccf-9566-e4d4a612a37e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.099642] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943883, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.104289] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Created folder: Project (580da549128340b9ab717ba1ada787b7) in parent group-v590848. [ 750.104507] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Creating folder: Instances. Parent ref: group-v590931. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.105085] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e12e062-2520-44fd-8d74-4695011e8bef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.126384] env[68437]: DEBUG nova.compute.manager [req-f4364aa7-09d5-4728-a7ec-b374fea33eaa req-e91830b4-f8d7-4d65-bc4b-8fbe80ccefb3 service nova] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Detach interface failed, port_id=29fb0e88-6864-4d4b-b480-2f08c9984421, reason: Instance d7c64aa1-44f8-44f4-9fb6-463033837736 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 750.131981] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Created folder: Instances in parent group-v590931. [ 750.132359] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
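
Before the VM is built, each Neutron VIF is flattened into the small "Instance VIF info" dict logged above (backing network reference, MAC, iface id, vif_model). The sketch below reproduces that translation for the NSX OpaqueNetwork case using the logged values; the function name itself is illustrative, not the driver's API.

    def neutron_vif_to_vmware_vif_info(vif):
        """Shape a Neutron VIF into the dict the vmwareapi driver logs as 'VIF info'."""
        return {
            "network_name": vif["network"]["bridge"],           # 'br-int'
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": vif["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }

    vif = {
        "id": "9457e907-17df-45cc-b8da-a57bf9901e34",
        "address": "fa:16:3e:b0:74:43",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229"},
    }
    print(neutron_vif_to_vmware_vif_info(vif))
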
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.133734] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.133734] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26152370-c45b-4242-8b6c-d100391f62cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.164051] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.164051] env[68437]: value = "task-2943890" [ 750.164051] env[68437]: _type = "Task" [ 750.164051] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.181959] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943890, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.183831] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943886, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.424995] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943885, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.825576} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.428937] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] aa39767e-1ae7-4881-b0a8-e7b66e1ceed2/aa39767e-1ae7-4881-b0a8-e7b66e1ceed2.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.429302] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.429982] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-601e7a3a-5d3b-44dd-9f1e-f1ffcfeacea2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.445585] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 750.445585] env[68437]: value = "task-2943891" [ 750.445585] env[68437]: _type = "Task" [ 750.445585] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.450751] env[68437]: INFO nova.compute.manager [-] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Took 1.28 seconds to deallocate network for instance. [ 750.464881] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943891, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.528043] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943887, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.600892] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943883, 'name': ReconfigVM_Task, 'duration_secs': 1.048782} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.601210] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36/5435b4d8-46c3-43e3-b11b-cbeb580e2f36.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.601816] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a0608055-d62f-4e96-9912-ec1416664735 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.610342] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 750.610342] env[68437]: value = "task-2943892" [ 750.610342] env[68437]: _type = "Task" [ 750.610342] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.620183] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943892, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.651342] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94f87e2-e161-45af-bb6b-65a0c1c928d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.659685] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692497c1-ba92-4694-ace3-db220011c707 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.702220] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c54232-319d-450e-a649-5fdd587552ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.709086] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943890, 'name': CreateVM_Task, 'duration_secs': 0.407479} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.709861] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.710587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.710788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.711252] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.711634] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2be55c5-fec0-4516-b7a6-96e9368cc8d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.719838] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943886, 'name': ReconfigVM_Task, 'duration_secs': 0.561805} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.720643] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530/1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.722498] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88b17b4-ca6b-4770-864a-51592ba5f48c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.728132] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0727ac3d-6291-4bd6-b4ee-e9ed40e9d53f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.729950] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 750.729950] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52db4147-1207-2278-6502-c5ab1d42b7b1" [ 750.729950] env[68437]: _type = "Task" [ 750.729950] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.745206] env[68437]: DEBUG nova.compute.provider_tree [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.748519] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 750.748519] env[68437]: value = "task-2943893" [ 750.748519] env[68437]: _type = "Task" [ 750.748519] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.754960] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52db4147-1207-2278-6502-c5ab1d42b7b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.761467] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943893, 'name': Rename_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.898583] env[68437]: DEBUG nova.network.neutron [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updated VIF entry in instance network info cache for port 9457e907-17df-45cc-b8da-a57bf9901e34. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 750.898961] env[68437]: DEBUG nova.network.neutron [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updating instance_info_cache with network_info: [{"id": "9457e907-17df-45cc-b8da-a57bf9901e34", "address": "fa:16:3e:b0:74:43", "network": {"id": "260635a4-08b1-4ef0-8347-6f14d256fce0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2002916601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "580da549128340b9ab717ba1ada787b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9457e907-17", "ovs_interfaceid": "9457e907-17df-45cc-b8da-a57bf9901e34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.934563] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.934791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.955722] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082592} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.955979] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.956784] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e888af1-6038-48b3-be18-5fa509731d38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.962284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.979977] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] aa39767e-1ae7-4881-b0a8-e7b66e1ceed2/aa39767e-1ae7-4881-b0a8-e7b66e1ceed2.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 750.981325] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 750.984304] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e038ab8-8c33-4259-95c4-cb6afa333070 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.006021] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 751.006021] env[68437]: value = "task-2943894" [ 751.006021] env[68437]: _type = "Task" [ 751.006021] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.016384] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943894, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.028244] env[68437]: DEBUG oslo_vmware.api [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2943887, 'name': PowerOnVM_Task, 'duration_secs': 0.694949} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.030330] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.030592] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.030764] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.030955] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.031116] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.031272] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.031499] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.031827] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
751.031827] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.031995] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.032234] env[68437]: DEBUG nova.virt.hardware [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.032530] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 751.032743] env[68437]: DEBUG nova.compute.manager [None req-1020ca0c-42f5-47b6-974a-78ce7fe6a54e tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.033612] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c93d326-9f45-463b-a10d-43537fc0bcb0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.036661] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c42633b-2f6b-4d23-843b-1030150f7eb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.049151] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c6071a-ca73-4204-966a-a6679bd1fc53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.120877] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943892, 'name': Rename_Task, 'duration_secs': 0.338425} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.121155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.121397] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a040e62-4d6f-4bc7-a6ad-d622d6a850a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.128595] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Waiting for the task: (returnval){ [ 751.128595] env[68437]: value = "task-2943895" [ 751.128595] env[68437]: _type = "Task" [ 751.128595] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.138519] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943895, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.243102] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52db4147-1207-2278-6502-c5ab1d42b7b1, 'name': SearchDatastore_Task, 'duration_secs': 0.041943} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.245288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.245288] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.245288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.245288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.245629] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.246054] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ade62599-96bd-47ae-90df-69c144428afe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.250463] env[68437]: DEBUG nova.scheduler.client.report [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.263042] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.263271] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.264970] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-541670aa-7be5-4ee9-9340-249d6409f106 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.273478] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943893, 'name': Rename_Task, 'duration_secs': 0.46656} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.275367] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 751.276135] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-146c5c15-f074-41c5-9302-2d0edb181ca4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.279983] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 751.279983] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5272121c-e067-55a1-4ea4-f5c6cfd3fa5e" [ 751.279983] env[68437]: _type = "Task" [ 751.279983] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.289069] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 751.289069] env[68437]: value = "task-2943896" [ 751.289069] env[68437]: _type = "Task" [ 751.289069] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.307851] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5272121c-e067-55a1-4ea4-f5c6cfd3fa5e, 'name': SearchDatastore_Task, 'duration_secs': 0.018421} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.308223] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943896, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.309369] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0c702ce-4d47-4787-aa38-7dab201b20c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.318393] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 751.318393] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520cddeb-cf5f-c48b-4eb9-730b1234ee1b" [ 751.318393] env[68437]: _type = "Task" [ 751.318393] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.329224] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cddeb-cf5f-c48b-4eb9-730b1234ee1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.401714] env[68437]: DEBUG oslo_concurrency.lockutils [req-b6c4b39d-1612-4e76-88b2-08ad13564e00 req-f565d961-7a44-47d1-b702-ff90a367acbf service nova] Releasing lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.518529] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943894, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.575773] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Successfully updated port: ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 751.597472] env[68437]: DEBUG nova.compute.manager [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Received event network-vif-plugged-ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.598142] env[68437]: DEBUG oslo_concurrency.lockutils [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] Acquiring lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.598387] env[68437]: DEBUG oslo_concurrency.lockutils [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.598556] env[68437]: DEBUG oslo_concurrency.lockutils [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.598728] env[68437]: DEBUG nova.compute.manager [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] No waiting events found dispatching network-vif-plugged-ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 751.598892] env[68437]: WARNING nova.compute.manager [req-f1b77bb3-2dfd-4c87-8f0c-739139d84df1 req-d4382b95-bc1d-49a5-86ea-b8158bf78023 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Received unexpected event network-vif-plugged-ed35f15a-aaef-467f-9f0a-437e412e5bb4 for instance with vm_state building and task_state spawning. [ 751.641922] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943895, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.764645] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.803s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.765183] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.769282] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.203s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.769282] env[68437]: DEBUG nova.objects.instance [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'resources' on Instance uuid 5abc2c5a-2177-4d77-97ce-872808bb47ee {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 751.801990] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943896, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.836033] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cddeb-cf5f-c48b-4eb9-730b1234ee1b, 'name': SearchDatastore_Task, 'duration_secs': 0.016941} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.836033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.836033] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b92efa60-ef18-4578-b00d-6a2438e7eacf/b92efa60-ef18-4578-b00d-6a2438e7eacf.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.836033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9715726b-957b-48b1-a856-b193c183d8b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.845988] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 751.845988] env[68437]: value = "task-2943897" [ 751.845988] env[68437]: _type = "Task" [ 751.845988] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.856436] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943897, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.018354] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943894, 'name': ReconfigVM_Task, 'duration_secs': 0.769491} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.018467] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Reconfigured VM instance instance-0000001b to attach disk [datastore1] aa39767e-1ae7-4881-b0a8-e7b66e1ceed2/aa39767e-1ae7-4881-b0a8-e7b66e1ceed2.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.019129] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70aab44d-4c7a-441f-9dc0-f216255d22ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.025687] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 752.025687] env[68437]: value = "task-2943898" [ 752.025687] env[68437]: _type = "Task" [ 752.025687] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.034910] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943898, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.080965] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.081800] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.081800] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 752.142811] env[68437]: DEBUG oslo_vmware.api [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Task: {'id': task-2943895, 'name': PowerOnVM_Task, 'duration_secs': 0.733455} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.143124] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.143354] env[68437]: DEBUG nova.compute.manager [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.144227] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1ecc78-6f01-4ea0-ba8d-9e2f1d08ddd1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.272354] env[68437]: DEBUG nova.compute.utils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.278038] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.278567] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 752.304328] env[68437]: DEBUG oslo_vmware.api [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943896, 'name': PowerOnVM_Task, 'duration_secs': 0.654407} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.308155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 752.308155] env[68437]: INFO nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Took 10.80 seconds to spawn the instance on the hypervisor. 
[ 752.308454] env[68437]: DEBUG nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.310693] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56dd1e2-3d66-481e-afb6-3439e97d5492 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.352093] env[68437]: DEBUG nova.policy [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5698adb41be34f3c8e562f9997e73810', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '27916c244400443bbc2eef5bf0d6b552', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.363986] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943897, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.539664] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943898, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.622740] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 752.663592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.731830] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Successfully created port: 175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.779426] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.789923] env[68437]: DEBUG nova.network.neutron [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updating instance_info_cache with network_info: [{"id": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "address": "fa:16:3e:5f:f5:32", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped35f15a-aa", "ovs_interfaceid": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.840763] env[68437]: INFO nova.compute.manager [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Took 42.80 seconds to build instance. [ 752.867057] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943897, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604131} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.867057] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b92efa60-ef18-4578-b00d-6a2438e7eacf/b92efa60-ef18-4578-b00d-6a2438e7eacf.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 752.867057] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 752.867057] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-634ae944-ec20-48c8-9247-a33639b13c2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.874989] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bbb52c-1f3e-4975-8a50-3a196c33e0d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.877953] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 752.877953] env[68437]: value = "task-2943899" [ 752.877953] env[68437]: _type = "Task" [ 752.877953] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.890110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabfabe0-a9b1-402e-a050-d974f38bdfa9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.896947] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943899, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.933774] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53061a6-9840-4547-bf11-7f1e15fb6b0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.948020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34fe8d1-ca6e-4c98-a2c6-97bb13f7523f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.965691] env[68437]: DEBUG nova.compute.provider_tree [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.038548] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943898, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.292310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.292656] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Instance network_info: |[{"id": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "address": "fa:16:3e:5f:f5:32", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped35f15a-aa", "ovs_interfaceid": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 753.293081] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 
9a7c248f-5262-4f03-aace-f22c4976bb0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:f5:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '101a44fc-ffde-4e3e-ad82-363454ae458b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed35f15a-aaef-467f-9f0a-437e412e5bb4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 753.300604] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Creating folder: Project (f8fb35616c64449f9a7b85f2d7e7d3c2). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 753.300901] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52a59d4d-b505-4a58-a270-256d5b0ae425 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.314533] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Created folder: Project (f8fb35616c64449f9a7b85f2d7e7d3c2) in parent group-v590848. [ 753.314730] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Creating folder: Instances. Parent ref: group-v590934. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 753.314984] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ea6da37-178a-4204-a479-2b02ecc2c6f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.327684] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Created folder: Instances in parent group-v590934. [ 753.327933] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 753.328141] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 753.328354] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1241a6c1-18aa-4351-9915-ee43293e9939 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.343306] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f5bd94c4-b547-461e-8582-930f23b2bbaa tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.484s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.351035] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 753.351035] env[68437]: value = "task-2943902" [ 753.351035] env[68437]: _type = "Task" [ 753.351035] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.359767] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943902, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.388257] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943899, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084029} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.388541] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.389334] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2baea7-61c7-41d3-b3d4-6eb574878f94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.413495] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] b92efa60-ef18-4578-b00d-6a2438e7eacf/b92efa60-ef18-4578-b00d-6a2438e7eacf.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.413801] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc30561b-447d-491e-89a3-9a3be81fab43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.435108] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 753.435108] env[68437]: value = "task-2943903" [ 753.435108] env[68437]: _type = "Task" [ 753.435108] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.444555] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943903, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.470029] env[68437]: DEBUG nova.scheduler.client.report [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.541253] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943898, 'name': Rename_Task, 'duration_secs': 1.188325} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.542056] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.542444] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7eea6c35-b3bc-444b-9fcd-f8b4a7cc288a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.551731] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 753.551731] env[68437]: value = "task-2943904" [ 753.551731] env[68437]: _type = "Task" [ 753.551731] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.561130] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943904, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.623942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.624559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.626049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.626049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.626049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 
tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.627747] env[68437]: INFO nova.compute.manager [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Terminating instance [ 753.673418] env[68437]: DEBUG nova.compute.manager [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Received event network-changed-ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 753.673918] env[68437]: DEBUG nova.compute.manager [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Refreshing instance network info cache due to event network-changed-ed35f15a-aaef-467f-9f0a-437e412e5bb4. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 753.673918] env[68437]: DEBUG oslo_concurrency.lockutils [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] Acquiring lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.674109] env[68437]: DEBUG oslo_concurrency.lockutils [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] Acquired lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.674338] env[68437]: DEBUG nova.network.neutron [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Refreshing network info cache for port ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 753.790099] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 753.815621] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 753.815926] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.816120] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 753.816358] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.816563] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 753.816735] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 753.817026] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 753.817229] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 753.817474] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f 
tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 753.817680] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 753.817966] env[68437]: DEBUG nova.virt.hardware [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 753.819779] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a71da2d-12a3-4c23-a85d-3af2a59f0eb9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.830632] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4a09f7-4f50-4095-9050-dbd1b2ea7ad5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.847183] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 753.861035] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943902, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.946650] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943903, 'name': ReconfigVM_Task, 'duration_secs': 0.325392} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.946987] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Reconfigured VM instance instance-0000001c to attach disk [datastore1] b92efa60-ef18-4578-b00d-6a2438e7eacf/b92efa60-ef18-4578-b00d-6a2438e7eacf.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.947640] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6db0412a-481f-42f7-b226-083de9493723 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.956482] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 753.956482] env[68437]: value = "task-2943905" [ 753.956482] env[68437]: _type = "Task" [ 753.956482] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.969502] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943905, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.976996] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.208s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.980271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.460s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.980535] env[68437]: DEBUG nova.objects.instance [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lazy-loading 'resources' on Instance uuid 0649ee2f-cd90-4597-b7c4-09f2acaf3f54 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 754.005964] env[68437]: INFO nova.scheduler.client.report [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted allocations for instance 5abc2c5a-2177-4d77-97ce-872808bb47ee [ 754.063657] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943904, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.087697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.088360] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.088722] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.088981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.089267] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.091706] env[68437]: INFO nova.compute.manager [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Terminating instance [ 754.131570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "refresh_cache-5435b4d8-46c3-43e3-b11b-cbeb580e2f36" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.131570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquired lock "refresh_cache-5435b4d8-46c3-43e3-b11b-cbeb580e2f36" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.131570] env[68437]: DEBUG nova.network.neutron [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af 
tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 754.225591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.225858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.226192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.226253] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.226427] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.232933] env[68437]: INFO nova.compute.manager [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Terminating instance [ 754.322643] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Successfully updated port: 175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 754.375042] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943902, 'name': CreateVM_Task, 'duration_secs': 0.521857} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.375042] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.375042] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.375042] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.375042] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.375624] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745973ac-20ec-4b53-a388-3cc010346821 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.381307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.383211] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 754.383211] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a09f7a-96f8-3863-eb7e-e7c8c19be87e" [ 754.383211] env[68437]: _type = "Task" [ 754.383211] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.394126] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a09f7a-96f8-3863-eb7e-e7c8c19be87e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.468249] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943905, 'name': Rename_Task, 'duration_secs': 0.167615} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.468620] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.468918] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3cf0b33-98d8-4012-9947-0d7ca6f35932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.477314] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 754.477314] env[68437]: value = "task-2943906" [ 754.477314] env[68437]: _type = "Task" [ 754.477314] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.488178] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.517518] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87d5004-5000-4a41-a736-2c365e1821d4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "5abc2c5a-2177-4d77-97ce-872808bb47ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.791s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.565610] env[68437]: DEBUG oslo_vmware.api [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943904, 'name': PowerOnVM_Task, 'duration_secs': 0.882367} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.566395] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.566762] env[68437]: INFO nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Took 10.23 seconds to spawn the instance on the hypervisor. 
[ 754.567479] env[68437]: DEBUG nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 754.569450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec95c65-872a-4f55-8740-eda0c62e7348 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.578908] env[68437]: DEBUG nova.network.neutron [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updated VIF entry in instance network info cache for port ed35f15a-aaef-467f-9f0a-437e412e5bb4. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 754.579372] env[68437]: DEBUG nova.network.neutron [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updating instance_info_cache with network_info: [{"id": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "address": "fa:16:3e:5f:f5:32", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped35f15a-aa", "ovs_interfaceid": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.599630] env[68437]: DEBUG nova.compute.manager [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.599630] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.599630] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e772ead4-4f7e-4d69-80fe-ce7f65172cd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.610186] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.617022] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ddb3a59-05f1-42f6-8b76-10b088978d4f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.625121] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 754.625121] env[68437]: value = "task-2943907" [ 754.625121] env[68437]: _type = "Task" [ 754.625121] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.641680] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943907, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.684826] env[68437]: DEBUG nova.network.neutron [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 754.731516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.731783] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.739785] env[68437]: DEBUG nova.compute.manager [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.739998] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.741012] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4a4e04-ab83-4f8b-bb50-050733153bb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.752109] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.755251] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c535fc0-c6fc-41e9-aa5b-5fb687791b5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.765815] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 754.765815] env[68437]: value = "task-2943908" [ 754.765815] env[68437]: _type = "Task" [ 754.765815] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.780941] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943908, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.788548] env[68437]: DEBUG nova.network.neutron [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.825730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.825730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquired lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.825730] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 754.898429] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a09f7a-96f8-3863-eb7e-e7c8c19be87e, 'name': SearchDatastore_Task, 'duration_secs': 0.014977} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.898757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.899041] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 754.899689] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.899689] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.899689] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.899883] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-552d6080-4008-40ba-94d9-e60d01b262f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.913451] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.913974] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 754.914599] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8949d8e7-2d8c-428d-8531-c3329eff8deb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.925551] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 754.925551] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524abb64-91dc-b07b-cf56-fa4bd03f9585" [ 754.925551] env[68437]: _type = "Task" [ 754.925551] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.935563] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524abb64-91dc-b07b-cf56-fa4bd03f9585, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.989639] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943906, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.085789] env[68437]: DEBUG oslo_concurrency.lockutils [req-921d21e9-0bec-49ae-8cf7-c11ba5d3e0d7 req-d68062ed-531d-4f26-8702-5d4f0a12afb1 service nova] Releasing lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.104986] env[68437]: INFO nova.compute.manager [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Took 37.74 seconds to build instance. [ 755.135413] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c174fa98-5bf2-4b41-8e0b-af2577ac4ac5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.140850] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943907, 'name': PowerOffVM_Task, 'duration_secs': 0.300135} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.141707] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.141932] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.142259] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6607021-3f50-4f12-9556-e62857b4181e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.147529] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f5b8a0-c14b-42e3-9ad2-1d54f677f8a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.180942] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefb77b4-3920-450e-ad15-a518214f517c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.188875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3a6699-3197-45ca-8af3-16f5aad5b716 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.202807] env[68437]: DEBUG nova.compute.provider_tree [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.228143] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.228379] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.228563] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleting the datastore file [datastore1] 67312d87-cc63-4dc7-b9c1-9c8d349a4756 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.228813] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e80b1e7a-e965-4ef8-8e00-68ebe1e0b561 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.236065] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 755.236065] env[68437]: value = "task-2943910" [ 755.236065] env[68437]: _type = "Task" [ 755.236065] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.245670] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.276474] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943908, 'name': PowerOffVM_Task, 'duration_secs': 0.304743} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.276616] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.276773] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.277422] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c91e9a8-8722-4e54-b21f-3efbca3b6d29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.291212] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Releasing lock "refresh_cache-5435b4d8-46c3-43e3-b11b-cbeb580e2f36" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.291652] env[68437]: DEBUG nova.compute.manager [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 755.291874] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 755.292780] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048a066f-6549-4426-8bfe-56147d99d794 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.301053] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 755.301317] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de79360d-f3fd-4acb-98bf-3fecb0191e51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.309076] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 755.309076] env[68437]: value = "task-2943912" [ 755.309076] env[68437]: _type = "Task" [ 755.309076] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.317858] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943912, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.361801] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.362112] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.362294] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleting the datastore file [datastore2] 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.362580] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cd3634e-6707-4f24-a641-a9cd34221345 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.365238] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 755.372792] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 755.372792] env[68437]: value = "task-2943913" [ 755.372792] env[68437]: _type = "Task" [ 755.372792] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.381257] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943913, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.436285] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524abb64-91dc-b07b-cf56-fa4bd03f9585, 'name': SearchDatastore_Task, 'duration_secs': 0.031775} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.437135] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ad6de0-c719-47b6-bcb7-9362690b6a70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.442540] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 755.442540] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3a765-18b1-eb38-7e92-49ecdd4ac536" [ 755.442540] env[68437]: _type = "Task" [ 755.442540] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.451311] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3a765-18b1-eb38-7e92-49ecdd4ac536, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.491355] env[68437]: DEBUG oslo_vmware.api [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2943906, 'name': PowerOnVM_Task, 'duration_secs': 0.583885} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.491682] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.491994] env[68437]: INFO nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Took 8.40 seconds to spawn the instance on the hypervisor. 
[ 755.492288] env[68437]: DEBUG nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.493809] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c981744-fdb3-4834-a702-6b3c88c052cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.601126] env[68437]: DEBUG nova.network.neutron [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Updating instance_info_cache with network_info: [{"id": "175b0028-1953-4195-b2a1-ec9d791f429e", "address": "fa:16:3e:a9:a2:d2", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap175b0028-19", "ovs_interfaceid": "175b0028-1953-4195-b2a1-ec9d791f429e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.606700] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2bb3105e-2a5b-44fc-93ab-a9595a6b21b1 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.371s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.697781] env[68437]: DEBUG nova.compute.manager [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Received event network-vif-plugged-175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 755.697998] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] Acquiring lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.698220] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] 
Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.698353] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.698518] env[68437]: DEBUG nova.compute.manager [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] No waiting events found dispatching network-vif-plugged-175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.698681] env[68437]: WARNING nova.compute.manager [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Received unexpected event network-vif-plugged-175b0028-1953-4195-b2a1-ec9d791f429e for instance with vm_state building and task_state spawning. [ 755.698916] env[68437]: DEBUG nova.compute.manager [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Received event network-changed-175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 755.698989] env[68437]: DEBUG nova.compute.manager [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Refreshing instance network info cache due to event network-changed-175b0028-1953-4195-b2a1-ec9d791f429e. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 755.699341] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] Acquiring lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.705770] env[68437]: DEBUG nova.scheduler.client.report [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.747463] env[68437]: DEBUG oslo_vmware.api [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318309} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.747560] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.747696] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.747872] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.748051] env[68437]: INFO nova.compute.manager [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Took 1.15 seconds to destroy the instance on the hypervisor. [ 755.748301] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.748493] env[68437]: DEBUG nova.compute.manager [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.748588] env[68437]: DEBUG nova.network.neutron [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 755.820429] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943912, 'name': PowerOffVM_Task, 'duration_secs': 0.20682} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.820699] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.820878] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 755.821143] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4017275d-400d-4622-8cb0-864f2ac701e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.851260] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 755.851515] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 755.851711] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Deleting the datastore file [datastore1] 5435b4d8-46c3-43e3-b11b-cbeb580e2f36 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.852354] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef5134e6-e972-440c-9789-8760d57b10d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.859971] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for the task: (returnval){ [ 755.859971] env[68437]: value = "task-2943915" [ 755.859971] env[68437]: _type = "Task" [ 755.859971] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.869789] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.883417] env[68437]: DEBUG oslo_vmware.api [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2943913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.49928} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.883672] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.885184] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.885502] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.885763] env[68437]: INFO nova.compute.manager [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Took 1.15 seconds to destroy the instance on the hypervisor. [ 755.886036] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.886238] env[68437]: DEBUG nova.compute.manager [-] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.886333] env[68437]: DEBUG nova.network.neutron [-] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 755.955087] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3a765-18b1-eb38-7e92-49ecdd4ac536, 'name': SearchDatastore_Task, 'duration_secs': 0.012992} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.955532] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.955633] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 9a7c248f-5262-4f03-aace-f22c4976bb0f/9a7c248f-5262-4f03-aace-f22c4976bb0f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 755.955897] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3dca8ad5-6b77-4ca1-abf5-abbad6031206 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.964477] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 755.964477] env[68437]: value = "task-2943916" [ 755.964477] env[68437]: _type = "Task" [ 755.964477] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.973237] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.014323] env[68437]: INFO nova.compute.manager [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Took 34.00 seconds to build instance. 
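[editor's note] The lockutils entries in this section ("acquired ... waited Ns", '"released" ... held Ns') reflect named-lock serialization around operations such as _locked_do_build_and_run_instance. The decorator below is a simplified approximation of that pattern using plain threading.Lock; it is an assumption-laden sketch, not oslo_concurrency's implementation.

    import functools
    import threading
    import time

    _locks: dict[str, threading.Lock] = {}

    def synchronized(name: str):
        """Serialize calls on a process-local named lock and log wait/hold times."""
        lock = _locks.setdefault(name, threading.Lock())

        def decorator(func):
            @functools.wraps(func)
            def inner(*args, **kwargs):
                t0 = time.monotonic()
                with lock:
                    waited = time.monotonic() - t0
                    print(f'Lock "{name}" acquired by "{func.__name__}" :: waited {waited:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return func(*args, **kwargs)
                    finally:
                        held = time.monotonic() - t1
                        print(f'Lock "{name}" "released" by "{func.__name__}" :: held {held:.3f}s')
            return inner
        return decorator

The long hold times logged here (e.g. ~50s for build_and_run_instance locks) simply measure how long the decorated operation ran while holding its per-instance lock.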
[ 756.108154] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Releasing lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.108154] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Instance network_info: |[{"id": "175b0028-1953-4195-b2a1-ec9d791f429e", "address": "fa:16:3e:a9:a2:d2", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap175b0028-19", "ovs_interfaceid": "175b0028-1953-4195-b2a1-ec9d791f429e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 756.108350] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] Acquired lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.108350] env[68437]: DEBUG nova.network.neutron [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Refreshing network info cache for port 175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 756.108350] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:a2:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '175b0028-1953-4195-b2a1-ec9d791f429e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.119594] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Creating folder: Project (27916c244400443bbc2eef5bf0d6b552). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.123197] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 756.128613] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f0ddb79-9034-4afd-bce8-235194956884 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.142624] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Created folder: Project (27916c244400443bbc2eef5bf0d6b552) in parent group-v590848. [ 756.142624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Creating folder: Instances. Parent ref: group-v590937. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.142624] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a7e0fe0-a1b3-407c-9428-2d978b38ba20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.156474] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Created folder: Instances in parent group-v590937. [ 756.158172] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.158172] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.158172] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-977f5ad2-3932-4556-88f5-96fe7ce14193 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.181160] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.181160] env[68437]: value = "task-2943919" [ 756.181160] env[68437]: _type = "Task" [ 756.181160] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.190538] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943919, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.210911] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.214395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.404s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.218403] env[68437]: INFO nova.compute.claims [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.241781] env[68437]: INFO nova.scheduler.client.report [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Deleted allocations for instance 0649ee2f-cd90-4597-b7c4-09f2acaf3f54 [ 756.255545] env[68437]: DEBUG nova.compute.manager [req-a6d45a76-4287-4301-a3b7-9507b81229b2 req-c1d410e4-d72a-4bc4-9d7f-b6d808267a7f service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Received event network-vif-deleted-d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 756.256555] env[68437]: INFO nova.compute.manager [req-a6d45a76-4287-4301-a3b7-9507b81229b2 req-c1d410e4-d72a-4bc4-9d7f-b6d808267a7f service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Neutron deleted interface d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee; detaching it from the instance and deleting it from the info cache [ 756.256555] env[68437]: DEBUG nova.network.neutron [req-a6d45a76-4287-4301-a3b7-9507b81229b2 req-c1d410e4-d72a-4bc4-9d7f-b6d808267a7f service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.377340] env[68437]: DEBUG oslo_vmware.api [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Task: {'id': task-2943915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200866} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.377340] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.377608] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.377660] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.377836] env[68437]: INFO nova.compute.manager [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Took 1.09 seconds to destroy the instance on the hypervisor. [ 756.378118] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.378319] env[68437]: DEBUG nova.compute.manager [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.378416] env[68437]: DEBUG nova.network.neutron [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 756.397796] env[68437]: DEBUG nova.network.neutron [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 756.478413] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943916, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.516152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d71b5f2b-b28d-437a-9784-4f9e08f56410 tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.953s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.576245] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.576478] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.576693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.576875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.577066] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.579182] env[68437]: INFO nova.compute.manager [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Terminating instance [ 756.628595] env[68437]: DEBUG nova.network.neutron [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.646238] env[68437]: DEBUG nova.network.neutron [-] [instance: 
1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.654302] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.691240] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943919, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.748339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7cfbf0e5-d038-45f7-9d43-1b1edcc0a9af tempest-ServerTagsTestJSON-442522135 tempest-ServerTagsTestJSON-442522135-project-member] Lock "0649ee2f-cd90-4597-b7c4-09f2acaf3f54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.007s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.758294] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e452b566-c7c5-4c4b-8eba-6a8d68d390f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.769644] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e78ad7-4038-49de-b842-02011b1dfa01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.813737] env[68437]: DEBUG nova.compute.manager [req-a6d45a76-4287-4301-a3b7-9507b81229b2 req-c1d410e4-d72a-4bc4-9d7f-b6d808267a7f service nova] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Detach interface failed, port_id=d5a3099e-d4f3-4dfd-9371-2124a7d9b3ee, reason: Instance 67312d87-cc63-4dc7-b9c1-9c8d349a4756 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 756.902443] env[68437]: DEBUG nova.network.neutron [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.909067] env[68437]: DEBUG nova.network.neutron [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Updated VIF entry in instance network info cache for port 175b0028-1953-4195-b2a1-ec9d791f429e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 756.909067] env[68437]: DEBUG nova.network.neutron [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Updating instance_info_cache with network_info: [{"id": "175b0028-1953-4195-b2a1-ec9d791f429e", "address": "fa:16:3e:a9:a2:d2", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.209", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap175b0028-19", "ovs_interfaceid": "175b0028-1953-4195-b2a1-ec9d791f429e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.976135] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943916, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.022606] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.083044] env[68437]: DEBUG nova.compute.manager [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 757.083299] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 757.084186] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab37375-9e81-4a75-b2f1-59f71d14bc70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.094216] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 757.094521] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f61a0eea-8f16-41b3-9ba4-fdcc498f630d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.102776] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 757.102776] env[68437]: value = "task-2943920" [ 757.102776] env[68437]: _type = "Task" [ 757.102776] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.112129] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.132522] env[68437]: INFO nova.compute.manager [-] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Took 1.38 seconds to deallocate network for instance. [ 757.155876] env[68437]: INFO nova.compute.manager [-] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Took 1.27 seconds to deallocate network for instance. [ 757.192943] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943919, 'name': CreateVM_Task, 'duration_secs': 0.678924} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.193832] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.194625] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.194821] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.195187] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 757.195751] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f5a3f97-6dfc-4893-9a70-ed574ed6b5f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.201437] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 757.201437] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5285dc81-c253-36a0-b3e4-ed9a74338908" [ 757.201437] env[68437]: _type = "Task" [ 757.201437] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.210760] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5285dc81-c253-36a0-b3e4-ed9a74338908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.406259] env[68437]: INFO nova.compute.manager [-] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Took 1.03 seconds to deallocate network for instance. [ 757.411617] env[68437]: DEBUG oslo_concurrency.lockutils [req-307ada34-0fba-4095-83e9-53f996ce258a req-7722adac-0358-4e52-a31f-af7e9d7ed517 service nova] Releasing lock "refresh_cache-56cfa7f3-12ad-42d0-a27f-ab8136a903ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.476892] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943916, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.541471] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.618370] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943920, 'name': PowerOffVM_Task, 'duration_secs': 0.242086} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.618657] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 757.618828] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 757.619101] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19f1e57f-7d1b-4e92-b777-38b794deac1a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.639419] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.662925] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.689043] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.690676] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.690979] env[68437]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Deleting the datastore file [datastore1] aa39767e-1ae7-4881-b0a8-e7b66e1ceed2 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.691409] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca13d6ef-4c8c-479e-b7ca-eb34759b541e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.701761] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for the task: (returnval){ [ 757.701761] env[68437]: value = "task-2943922" [ 757.701761] env[68437]: _type = "Task" [ 757.701761] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.718814] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.724012] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5285dc81-c253-36a0-b3e4-ed9a74338908, 'name': SearchDatastore_Task, 'duration_secs': 0.039728} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.724012] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.724012] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.724012] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.724358] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.724358] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.728047] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dc2b0f4-03e0-4980-a38e-b39787e29a38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.732555] env[68437]: DEBUG nova.compute.manager [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Received event network-vif-deleted-e3634eb4-0e4a-4cbf-bb26-5d6ddd38df34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 757.732555] env[68437]: DEBUG nova.compute.manager [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Received event network-changed-9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 757.732666] env[68437]: DEBUG nova.compute.manager [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Refreshing instance network info cache due to event network-changed-9457e907-17df-45cc-b8da-a57bf9901e34. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 757.732878] env[68437]: DEBUG oslo_concurrency.lockutils [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] Acquiring lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.733020] env[68437]: DEBUG oslo_concurrency.lockutils [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] Acquired lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.733177] env[68437]: DEBUG nova.network.neutron [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Refreshing network info cache for port 9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 757.745847] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.745847] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.745993] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6415f4e-c825-400d-86b7-ef35e94c88f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.753105] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 757.753105] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523010bb-9249-d4e8-da33-75559e6ff882" [ 757.753105] env[68437]: _type = "Task" [ 757.753105] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.767351] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523010bb-9249-d4e8-da33-75559e6ff882, 'name': SearchDatastore_Task, 'duration_secs': 0.010949} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.767836] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ea32f9-d7d3-41e3-9b99-b22d2ec3167a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.780025] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 757.780025] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52adc910-7260-4dbf-0967-64c7389c7b58" [ 757.780025] env[68437]: _type = "Task" [ 757.780025] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.785737] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52adc910-7260-4dbf-0967-64c7389c7b58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.821209] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ab2cf7-8411-4845-8aa4-0d4b7eb2f3a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.831050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf856607-5a11-4e40-9cdf-14accfb63c01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.863357] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2822f04e-3315-4e4b-9066-295b2e79c9f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.872831] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a332f153-1588-4199-bbb3-66c6385f202f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.888362] env[68437]: DEBUG nova.compute.provider_tree [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.917424] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.977992] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943916, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.61802} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.981027] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 9a7c248f-5262-4f03-aace-f22c4976bb0f/9a7c248f-5262-4f03-aace-f22c4976bb0f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.981027] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.981027] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ee87263-d733-4098-9f54-3675425dab13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.986947] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 757.986947] env[68437]: value = "task-2943923" [ 757.986947] env[68437]: _type = "Task" [ 757.986947] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.997143] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943923, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.220326] env[68437]: DEBUG oslo_vmware.api [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Task: {'id': task-2943922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413029} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.220644] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 758.221677] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 758.221677] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 758.221677] env[68437]: INFO nova.compute.manager [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 758.221677] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 758.221677] env[68437]: DEBUG nova.compute.manager [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 758.221922] env[68437]: DEBUG nova.network.neutron [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 758.291354] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52adc910-7260-4dbf-0967-64c7389c7b58, 'name': SearchDatastore_Task, 'duration_secs': 0.012227} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.291637] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.291907] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 56cfa7f3-12ad-42d0-a27f-ab8136a903ee/56cfa7f3-12ad-42d0-a27f-ab8136a903ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.292189] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad41a96e-6961-4c52-a774-e3a7b138fccd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.301659] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 758.301659] env[68437]: value = "task-2943924" [ 758.301659] env[68437]: _type = "Task" [ 758.301659] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.311175] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.392128] env[68437]: DEBUG nova.scheduler.client.report [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.499991] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177142} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.500550] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.501611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8918c7f-1213-49bd-a886-9877f040c98c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.535580] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 9a7c248f-5262-4f03-aace-f22c4976bb0f/9a7c248f-5262-4f03-aace-f22c4976bb0f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.539263] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26bf1b3b-41a6-4f4e-a40b-d9fd01754ce1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.566150] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 758.566150] env[68437]: value = "task-2943925" [ 758.566150] env[68437]: _type = "Task" [ 758.566150] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.575490] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.755210] env[68437]: DEBUG nova.network.neutron [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updated VIF entry in instance network info cache for port 9457e907-17df-45cc-b8da-a57bf9901e34. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 758.755210] env[68437]: DEBUG nova.network.neutron [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updating instance_info_cache with network_info: [{"id": "9457e907-17df-45cc-b8da-a57bf9901e34", "address": "fa:16:3e:b0:74:43", "network": {"id": "260635a4-08b1-4ef0-8347-6f14d256fce0", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-2002916601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "580da549128340b9ab717ba1ada787b7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9457e907-17", "ovs_interfaceid": "9457e907-17df-45cc-b8da-a57bf9901e34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.814526] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943924, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.902590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.903220] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.906202] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.732s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.906505] env[68437]: DEBUG nova.objects.instance [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'resources' on Instance uuid f517b14c-320f-4a6e-ae74-f2335e22f7a4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 759.074912] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.119889] env[68437]: DEBUG nova.network.neutron [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.256417] env[68437]: DEBUG oslo_concurrency.lockutils [req-232f3a50-80c5-4fc8-9373-42aea45eb0fa req-41a59055-7f49-4ea6-8152-f89f65bd685f service nova] Releasing lock "refresh_cache-b92efa60-ef18-4578-b00d-6a2438e7eacf" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.313272] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667396} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.313793] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 56cfa7f3-12ad-42d0-a27f-ab8136a903ee/56cfa7f3-12ad-42d0-a27f-ab8136a903ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.313905] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.314159] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a5ff39b-d974-4cdd-a5b4-993466f1bf33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.321739] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 759.321739] env[68437]: value = "task-2943926" [ 759.321739] env[68437]: _type = "Task" [ 759.321739] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.332619] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.412682] env[68437]: DEBUG nova.compute.utils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 759.414694] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.414694] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 759.505929] env[68437]: DEBUG nova.policy [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cf9b0dcb1774cc486f4168c1cea40d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8fb35616c64449f9a7b85f2d7e7d3c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.576121] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943925, 'name': ReconfigVM_Task, 'duration_secs': 0.605981} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.583020] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 9a7c248f-5262-4f03-aace-f22c4976bb0f/9a7c248f-5262-4f03-aace-f22c4976bb0f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.583020] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04270653-0b57-4e67-b347-7219841a205a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.588144] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 759.588144] env[68437]: value = "task-2943927" [ 759.588144] env[68437]: _type = "Task" [ 759.588144] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.600322] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943927, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.623131] env[68437]: INFO nova.compute.manager [-] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Took 1.40 seconds to deallocate network for instance. 
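Editor's note: the repeated "Waiting for the task ... progress is N% ... completed successfully" pairs around the CopyVirtualDisk/ExtendVirtualDisk/ReconfigVM tasks (task-2943923 through task-2943931) are the driver's poll-until-terminal loop against vCenter tasks. The sketch below illustrates that pattern only; `poll_state` is a hypothetical stand-in for the real oslo.vmware TaskInfo lookup (done via the PropertyCollector calls visible above), and this is not the library's actual implementation.

```python
import time


def wait_for_task(poll_state, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state.

    `poll_state` is any callable returning (state, progress); in the real
    driver this information comes from vCenter TaskInfo. Mirrors the
    'progress is N%' ... 'completed successfully' sequence in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll_state()
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        # 'queued' or 'running': report progress and poll again.
        print(f"Task {task_id} ({state}) progress is {progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


# Tiny simulated task so the sketch runs standalone (illustrative only).
_states = iter([("running", 0), ("running", 25), ("running", 100), ("success", 100)])
wait_for_task(lambda: next(_states), "task-2943924", poll_interval=0.01)
```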
[ 759.756087] env[68437]: DEBUG nova.compute.manager [req-f1a3b0f1-4e67-4e82-82ce-bb5228a88a0d req-27067ccb-65ea-4f09-a1aa-fcc44b2216ff service nova] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Received event network-vif-deleted-5c68a2d0-4a70-4c2b-a743-2e7b8ee67c58 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 759.835807] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065033} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.836104] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.836943] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199ec47c-5b91-4362-903b-9eb84a54a61e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.862301] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 56cfa7f3-12ad-42d0-a27f-ab8136a903ee/56cfa7f3-12ad-42d0-a27f-ab8136a903ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.864074] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Successfully created port: c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.867480] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f066649-6736-4a69-a78c-4f236cea2051 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.892302] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 759.892302] env[68437]: value = "task-2943928" [ 759.892302] env[68437]: _type = "Task" [ 759.892302] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.904610] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943928, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.917060] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.974593] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf471c6-df33-4886-8827-c8a6c4ac4c2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.982991] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd9d98a-813d-4e79-9316-234546193806 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.015700] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e92e454-1365-44af-b7a1-c72599facee4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.024486] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bff7481-b949-4563-8257-af72ee91e49c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.038747] env[68437]: DEBUG nova.compute.provider_tree [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.098642] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943927, 'name': Rename_Task, 'duration_secs': 0.361054} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.098932] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.099200] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ec8dcd6-8303-4950-b128-6edce130e1c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.106674] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 760.106674] env[68437]: value = "task-2943929" [ 760.106674] env[68437]: _type = "Task" [ 760.106674] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.115341] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.133296] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.403741] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.542685] env[68437]: DEBUG nova.scheduler.client.report [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.618770] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943929, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.902844] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943928, 'name': ReconfigVM_Task, 'duration_secs': 0.937321} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.903228] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 56cfa7f3-12ad-42d0-a27f-ab8136a903ee/56cfa7f3-12ad-42d0-a27f-ab8136a903ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.903835] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ce4945a-06cf-4384-a478-5c9a63a8dc54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.910746] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 760.910746] env[68437]: value = "task-2943930" [ 760.910746] env[68437]: _type = "Task" [ 760.910746] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.920054] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943930, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.929350] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.958643] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.958910] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.959085] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.959275] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.959424] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.959575] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.959792] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.959953] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.960139] 
env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.960308] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.960483] env[68437]: DEBUG nova.virt.hardware [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.961398] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534e43d-38aa-4f88-bd0c-3f6d9eeec8bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.970741] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95f83e1-2a95-4c93-a02f-6d9966b300a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.047743] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.050131] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.467s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.050425] env[68437]: DEBUG nova.objects.instance [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lazy-loading 'resources' on Instance uuid a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.070884] env[68437]: INFO nova.scheduler.client.report [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocations for instance f517b14c-320f-4a6e-ae74-f2335e22f7a4 [ 761.119504] env[68437]: DEBUG oslo_vmware.api [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943929, 'name': PowerOnVM_Task, 'duration_secs': 0.55757} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.119769] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.119966] env[68437]: INFO nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Took 10.14 seconds to spawn the instance on the hypervisor. [ 761.120165] env[68437]: DEBUG nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.120914] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daad0514-87c6-401e-8897-da71c4ebacfe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.421852] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943930, 'name': Rename_Task, 'duration_secs': 0.168607} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.422274] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.422636] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a7ec66c-ea73-44f0-a553-85681956763f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.431212] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 761.431212] env[68437]: value = "task-2943931" [ 761.431212] env[68437]: _type = "Task" [ 761.431212] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.439244] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943931, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.504257] env[68437]: DEBUG nova.compute.manager [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Received event network-vif-plugged-c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.504257] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] Acquiring lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.504257] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.504257] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.504257] env[68437]: DEBUG nova.compute.manager [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] No waiting events found dispatching network-vif-plugged-c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.504741] env[68437]: WARNING nova.compute.manager [req-bc310b09-b74d-4a2e-8b28-42530fe880dd req-8ce9abd7-4be4-4a36-a95a-70c63a185205 service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Received unexpected event network-vif-plugged-c57512fa-960c-429b-97d3-2d33ecf31cf0 for instance with vm_state building and task_state spawning. [ 761.579229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4b8fa0c1-3e37-4a58-912e-990d218fdaf4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "f517b14c-320f-4a6e-ae74-f2335e22f7a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.788s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.608031] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Successfully updated port: c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.642416] env[68437]: INFO nova.compute.manager [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Took 36.05 seconds to build instance. 
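Editor's note: the "Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05" records above carry the node's Placement inventory (VCPU, MEMORY_MB, DISK_GB with totals, reserved amounts and allocation ratios). Placement's usable capacity per resource class is generally described as (total - reserved) * allocation_ratio; the snippet below just applies that arithmetic to the values in the log and is an illustrative calculation, not Nova or Placement code.

```python
# Inventory values copied from the log for provider
# 422e986f-b38b-46ad-94b3-91f3ccd10a05.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0,
             "min_unit": 1, "max_unit": 16, "step_size": 1},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "min_unit": 1, "max_unit": 65530, "step_size": 1},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0,
                "min_unit": 1, "max_unit": 155, "step_size": 1},
}

for rc, inv in inventory.items():
    # Effective schedulable capacity: (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g} (max per allocation: {inv['max_unit']})")

# Expected output:
# VCPU: capacity=192 (max per allocation: 16)
# MEMORY_MB: capacity=196078 (max per allocation: 65530)
# DISK_GB: capacity=400 (max per allocation: 155)
```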
[ 761.944312] env[68437]: DEBUG oslo_vmware.api [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943931, 'name': PowerOnVM_Task, 'duration_secs': 0.460739} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.944633] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.945196] env[68437]: INFO nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Took 8.15 seconds to spawn the instance on the hypervisor. [ 761.945396] env[68437]: DEBUG nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 761.946386] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434220b8-11fc-4ea9-85d4-2bf6e575b7d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.075199] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afac7cd5-b8bb-4525-8724-0c0d80f16f93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.083350] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b542d699-c523-4eb6-abd8-2f38b3386f45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.115127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.115290] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.115447] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 762.117437] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900f13ff-a2fd-4367-9347-cf08d78fba9a {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.126418] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a45ab17-1fdf-4a63-a742-8d42509c4e39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.141132] env[68437]: DEBUG nova.compute.provider_tree [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.144082] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48ee5471-67d8-4cd7-980c-5f7ac65c6741 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.287s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.464871] env[68437]: INFO nova.compute.manager [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Took 36.51 seconds to build instance. [ 762.645422] env[68437]: DEBUG nova.scheduler.client.report [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.647966] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 762.668796] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 762.815124] env[68437]: DEBUG nova.network.neutron [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Updating instance_info_cache with network_info: [{"id": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "address": "fa:16:3e:af:d2:f6", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc57512fa-96", "ovs_interfaceid": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.967566] env[68437]: DEBUG oslo_concurrency.lockutils [None req-af631868-7911-4547-b0ad-717210c9501f tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.900s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.153895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.158730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.455s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.158730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.158862] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 763.159172] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.391s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.160583] env[68437]: INFO nova.compute.claims [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.165897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d162d92-c88f-4be9-8396-f7ac459ab605 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.177423] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9fed48-82da-40b3-a45f-47d5bc29c287 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.182316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.183272] env[68437]: INFO nova.scheduler.client.report [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted allocations for instance a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f [ 763.196470] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e972b3cc-3d88-4b09-beee-de06d9182d75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.208032] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4150c7a8-e886-40f7-a6e8-89bcef97e2fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.241811] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179923MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 763.242054] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.318572] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock 
"refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.318952] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Instance network_info: |[{"id": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "address": "fa:16:3e:af:d2:f6", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc57512fa-96", "ovs_interfaceid": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 763.319429] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:d2:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '101a44fc-ffde-4e3e-ad82-363454ae458b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c57512fa-960c-429b-97d3-2d33ecf31cf0', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.327328] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.327963] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.328233] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-703ba82c-748f-40ea-8932-71f276ba7e78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.349437] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.349437] env[68437]: value = "task-2943932" [ 763.349437] env[68437]: _type = "Task" [ 763.349437] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.357892] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943932, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.470225] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.530701] env[68437]: DEBUG nova.compute.manager [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Received event network-changed-c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 763.530860] env[68437]: DEBUG nova.compute.manager [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Refreshing instance network info cache due to event network-changed-c57512fa-960c-429b-97d3-2d33ecf31cf0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 763.531124] env[68437]: DEBUG oslo_concurrency.lockutils [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] Acquiring lock "refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.531269] env[68437]: DEBUG oslo_concurrency.lockutils [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] Acquired lock "refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.531450] env[68437]: DEBUG nova.network.neutron [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Refreshing network info cache for port c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 763.562978] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.563507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.563786] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring 
lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.563998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.564196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.566415] env[68437]: INFO nova.compute.manager [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Terminating instance [ 763.699681] env[68437]: DEBUG oslo_concurrency.lockutils [None req-68ffa7f0-53c9-44c9-92f7-1b0a939bd9c1 tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.253s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.861071] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943932, 'name': CreateVM_Task, 'duration_secs': 0.317048} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.861071] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.861363] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.861527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.861866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 763.862134] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c32cab44-2c13-43b2-9606-db5e61974e11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.867089] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 763.867089] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3feaf-5d27-db22-f927-a67760605ef7" [ 763.867089] env[68437]: _type = "Task" [ 763.867089] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.875500] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3feaf-5d27-db22-f927-a67760605ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.991335] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.070659] env[68437]: DEBUG nova.compute.manager [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 764.070900] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.071854] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07054f0c-96c1-4885-a93f-c780174741f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.080228] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 764.080480] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e24db49-aa63-48e2-9bdd-615d5387d46c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.087815] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 764.087815] env[68437]: value = "task-2943933" [ 764.087815] env[68437]: _type = "Task" [ 764.087815] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.097157] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943933, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.343217] env[68437]: DEBUG nova.network.neutron [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Updated VIF entry in instance network info cache for port c57512fa-960c-429b-97d3-2d33ecf31cf0. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 764.343642] env[68437]: DEBUG nova.network.neutron [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Updating instance_info_cache with network_info: [{"id": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "address": "fa:16:3e:af:d2:f6", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc57512fa-96", "ovs_interfaceid": "c57512fa-960c-429b-97d3-2d33ecf31cf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.383769] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a3feaf-5d27-db22-f927-a67760605ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.027228} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.383769] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.383769] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.383961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.384084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.384259] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.384524] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab4a83a1-8d91-4a60-94b0-beb89741aba6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.399597] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.399783] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.400566] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b6a151c-36f9-436f-843d-f9601b8c4263 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.409763] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 764.409763] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f6ae76-08a1-4801-582c-c58e5553bf2d" [ 764.409763] env[68437]: _type = "Task" [ 764.409763] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.422204] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f6ae76-08a1-4801-582c-c58e5553bf2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.598900] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943933, 'name': PowerOffVM_Task, 'duration_secs': 0.328511} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.599143] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.599360] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.599781] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89850035-dd8e-43f5-a60b-3f6b13009c8a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.610063] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "0484ccee-f003-4101-87c5-fed92f095d2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.610280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.610490] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.610669] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.611140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.612915] env[68437]: INFO nova.compute.manager [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Terminating instance [ 764.684608] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.684823] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.685733] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Deleting the datastore file [datastore2] 56cfa7f3-12ad-42d0-a27f-ab8136a903ee {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.685733] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-992dddb9-5970-401e-a59f-1100d115477f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.689929] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295c31b6-7089-4b60-b1da-9239252a7e02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.693799] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e 
tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for the task: (returnval){ [ 764.693799] env[68437]: value = "task-2943935" [ 764.693799] env[68437]: _type = "Task" [ 764.693799] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.700079] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65021a5d-75c6-4f80-b9c4-9c94271d6016 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.705762] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.732703] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8590991-2963-406e-a5e4-55db62deede7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.740979] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89758e0f-4926-4d29-81ed-e59741e47806 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.755037] env[68437]: DEBUG nova.compute.provider_tree [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.846946] env[68437]: DEBUG oslo_concurrency.lockutils [req-4c7358ae-b5f2-45b5-b255-59d800ce290b req-22ef99c2-83dc-43e7-8569-9f84fc83dcfd service nova] Releasing lock "refresh_cache-f1230046-d368-40ee-b1fa-99df4ab15a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.923475] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f6ae76-08a1-4801-582c-c58e5553bf2d, 'name': SearchDatastore_Task, 'duration_secs': 0.03285} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.924280] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d52b61a-f205-4826-b6ad-3bdd3508a9c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.929654] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 764.929654] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5297f360-89cf-c4e1-3091-e97b750f4430" [ 764.929654] env[68437]: _type = "Task" [ 764.929654] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.937440] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297f360-89cf-c4e1-3091-e97b750f4430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.117404] env[68437]: DEBUG nova.compute.manager [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 765.117699] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 765.118534] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34dcfd2-294c-46bd-a2d1-bd19d7ec125f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.126926] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 765.127139] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a543719-adb5-4e58-a69a-855b6356d15a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.135125] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 765.135125] env[68437]: value = "task-2943936" [ 765.135125] env[68437]: _type = "Task" [ 765.135125] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.143422] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.206963] env[68437]: DEBUG oslo_vmware.api [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Task: {'id': task-2943935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131491} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.207267] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.207594] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.207791] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.207969] env[68437]: INFO nova.compute.manager [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Took 1.14 seconds to destroy the instance on the hypervisor. [ 765.208229] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.208425] env[68437]: DEBUG nova.compute.manager [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.208519] env[68437]: DEBUG nova.network.neutron [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 765.258731] env[68437]: DEBUG nova.scheduler.client.report [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.443031] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297f360-89cf-c4e1-3091-e97b750f4430, 'name': SearchDatastore_Task, 'duration_secs': 0.009946} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.443031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.443031] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f1230046-d368-40ee-b1fa-99df4ab15a10/f1230046-d368-40ee-b1fa-99df4ab15a10.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 765.443031] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af5bab57-1f3d-4b3d-90c3-7b433a59cdef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.449662] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 765.449662] env[68437]: value = "task-2943937" [ 765.449662] env[68437]: _type = "Task" [ 765.449662] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.458703] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.593043] env[68437]: DEBUG nova.compute.manager [req-42b6b4b6-6a48-41ad-b521-88d5e8503c76 req-88050fc0-7438-4fcb-95b4-70faa90b1bfe service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Received event network-vif-deleted-175b0028-1953-4195-b2a1-ec9d791f429e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 765.593272] env[68437]: INFO nova.compute.manager [req-42b6b4b6-6a48-41ad-b521-88d5e8503c76 req-88050fc0-7438-4fcb-95b4-70faa90b1bfe service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Neutron deleted interface 175b0028-1953-4195-b2a1-ec9d791f429e; detaching it from the instance and deleting it from the info cache [ 765.593446] env[68437]: DEBUG nova.network.neutron [req-42b6b4b6-6a48-41ad-b521-88d5e8503c76 req-88050fc0-7438-4fcb-95b4-70faa90b1bfe service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.646597] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943936, 'name': PowerOffVM_Task, 'duration_secs': 0.186763} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.646985] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 765.647196] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 765.647486] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c588145-660e-400c-b6b5-6c6265e7580d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.725998] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 765.726196] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 765.726396] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleting the datastore file [datastore1] 0484ccee-f003-4101-87c5-fed92f095d2d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 765.726715] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed6dd92e-bddf-4c41-a881-5cc489040f2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.736833] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for the task: (returnval){ [ 765.736833] env[68437]: value = "task-2943939" [ 765.736833] env[68437]: _type = "Task" [ 765.736833] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.749490] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943939, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.765250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.765780] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.768950] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.029s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.769262] env[68437]: DEBUG nova.objects.instance [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 765.961279] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489131} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.961560] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f1230046-d368-40ee-b1fa-99df4ab15a10/f1230046-d368-40ee-b1fa-99df4ab15a10.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.961781] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.962069] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de04ce6e-d7f8-4e08-bdb0-a1a122985023 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.968983] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 765.968983] env[68437]: value = "task-2943940" [ 765.968983] env[68437]: _type = "Task" [ 765.968983] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.978053] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.050915] env[68437]: DEBUG nova.network.neutron [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.098451] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86a19261-a033-4ab9-8ef6-8bd6fe943f1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.112544] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dd7bc9-0cc9-4a6e-a17d-efb1623b568a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.157745] env[68437]: DEBUG nova.compute.manager [req-42b6b4b6-6a48-41ad-b521-88d5e8503c76 req-88050fc0-7438-4fcb-95b4-70faa90b1bfe service nova] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Detach interface failed, port_id=175b0028-1953-4195-b2a1-ec9d791f429e, reason: Instance 56cfa7f3-12ad-42d0-a27f-ab8136a903ee could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 766.249102] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943939, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.277187] env[68437]: DEBUG nova.compute.utils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 766.278700] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 766.278970] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 766.324925] env[68437]: DEBUG nova.policy [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cf9b0dcb1774cc486f4168c1cea40d6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8fb35616c64449f9a7b85f2d7e7d3c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.479170] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063906} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.479561] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.480367] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc7b104-a7b1-4568-b1cf-31f98689fcc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.503188] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] f1230046-d368-40ee-b1fa-99df4ab15a10/f1230046-d368-40ee-b1fa-99df4ab15a10.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.503586] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d2e6b70-7235-4788-b3c0-b27ed1a790b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.525160] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 766.525160] env[68437]: value = "task-2943941" [ 766.525160] env[68437]: _type = "Task" [ 766.525160] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.534052] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943941, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.553410] env[68437]: INFO nova.compute.manager [-] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Took 1.34 seconds to deallocate network for instance. [ 766.649568] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Successfully created port: c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.749403] env[68437]: DEBUG oslo_vmware.api [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Task: {'id': task-2943939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.534823} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.749749] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 766.749980] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 766.750219] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.750436] env[68437]: INFO nova.compute.manager [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Took 1.63 seconds to destroy the instance on the hypervisor. [ 766.750718] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.750943] env[68437]: DEBUG nova.compute.manager [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 766.751321] env[68437]: DEBUG nova.network.neutron [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 766.780795] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03393d21-8920-4466-9b7d-2d0528d781d4 tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.781449] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 25.897s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.783108] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 767.036347] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943941, 'name': ReconfigVM_Task} progress is 14%. 
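The "compute_resources" lock traffic above (released by finish_evacuation after being held 1.011s, then acquired by drop_move_claim_at_source after a 25.897s wait) comes from oslo.concurrency's named-lock helpers. A minimal sketch of that usage pattern follows, assuming only the public lockutils API; the function names and bodies are hypothetical.

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# "Acquiring/acquired/released lock 'compute_resources'" lines above.
# The lock name comes from the log; the functions are hypothetical.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources():
    # Runs with the named internal lock held; lockutils' wrapper is what
    # emits the "waited N s" / "held N s" debug lines seen in the log.
    pass


def drop_resources():
    # The same named lock can also be taken explicitly as a context manager.
    with lockutils.lock("compute_resources"):
        pass


claim_resources()
drop_resources()
```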
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.059706] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.296172] env[68437]: DEBUG nova.compute.manager [req-f1fc2546-5a9a-4bc8-8492-3ca923392fe2 req-3bf9807a-337e-4649-99b9-a4820c30fd15 service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Received event network-vif-deleted-037e71f7-843b-4af0-ad1e-1289fc16a69b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 767.296172] env[68437]: INFO nova.compute.manager [req-f1fc2546-5a9a-4bc8-8492-3ca923392fe2 req-3bf9807a-337e-4649-99b9-a4820c30fd15 service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Neutron deleted interface 037e71f7-843b-4af0-ad1e-1289fc16a69b; detaching it from the instance and deleting it from the info cache [ 767.296172] env[68437]: DEBUG nova.network.neutron [req-f1fc2546-5a9a-4bc8-8492-3ca923392fe2 req-3bf9807a-337e-4649-99b9-a4820c30fd15 service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.514325] env[68437]: DEBUG nova.network.neutron [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.539029] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943941, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.736519] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e94cea-2978-4fd8-a18c-f62452bcb54e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.744914] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf746c0a-7a12-4303-8b18-16cf9a881267 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.775306] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9057e6-acfb-4db9-9637-2938ff719774 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.782902] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb12cd5a-c531-4076-ad03-aae993227e96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.801317] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.803746] env[68437]: DEBUG nova.compute.provider_tree [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.805920] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0682c8d5-22f7-47a5-94d4-b6f0f59e2e37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.816790] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cffc07-1c51-4d7e-914f-9fce111aa4ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.837480] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.837729] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.837972] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.838190] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.838338] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.838491] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.838695] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.838856] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.839031] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.839199] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.839368] env[68437]: DEBUG nova.virt.hardware [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.840888] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e073239a-a0b5-4c43-8a76-82d27fa8cc38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.849105] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a98158-5f34-4b32-b8fa-fd5742c0c98b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.866175] env[68437]: DEBUG nova.compute.manager [req-f1fc2546-5a9a-4bc8-8492-3ca923392fe2 req-3bf9807a-337e-4649-99b9-a4820c30fd15 service nova] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Detach interface failed, port_id=037e71f7-843b-4af0-ad1e-1289fc16a69b, reason: Instance 0484ccee-f003-4101-87c5-fed92f095d2d could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 768.016618] env[68437]: INFO nova.compute.manager [-] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Took 1.27 seconds to deallocate network for instance. [ 768.037178] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943941, 'name': ReconfigVM_Task, 'duration_secs': 1.291133} completed successfully. 
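The nova.virt.hardware lines above show why a 1-vCPU m1.micro flavor with no flavor or image topology hints (limits and preferences all 0:0:0) collapses to a single possible topology of 1 socket x 1 core x 1 thread: each dimension is capped at 65536 (and, in practice, by the vCPU count), and 1 vCPU has only one factorisation. A simplified sketch of that enumeration, not nova's actual _get_possible_cpu_topologies code:

```python
# Simplified sketch (not nova's implementation) of the topology enumeration
# logged above: with no preference, every dimension is capped at 65536 and
# the only factorisation of 1 vCPU is 1x1x1.
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in itertools.product(
        range(1, min(vcpus, max_sockets) + 1),
        range(1, min(vcpus, max_cores) + 1),
        range(1, min(vcpus, max_threads) + 1),
    ):
        if sockets * cores * threads == vcpus:
            yield sockets, cores, threads


print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
```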
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.037439] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Reconfigured VM instance instance-0000001f to attach disk [datastore1] f1230046-d368-40ee-b1fa-99df4ab15a10/f1230046-d368-40ee-b1fa-99df4ab15a10.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.038095] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-576c92fc-c0d2-41f2-b0db-1705eb649c25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.045226] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 768.045226] env[68437]: value = "task-2943942" [ 768.045226] env[68437]: _type = "Task" [ 768.045226] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.054136] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943942, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.307769] env[68437]: DEBUG nova.scheduler.client.report [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 768.469691] env[68437]: DEBUG nova.compute.manager [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Received event network-vif-plugged-c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 768.469691] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] Acquiring lock "e3855111-7678-42c5-a37e-25e8587416aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.469691] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] Lock "e3855111-7678-42c5-a37e-25e8587416aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.469691] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] Lock "e3855111-7678-42c5-a37e-25e8587416aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.469996] env[68437]: DEBUG nova.compute.manager [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] No waiting events found dispatching network-vif-plugged-c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.474351] env[68437]: WARNING nova.compute.manager [req-3a31ea62-5656-4eab-945d-bc42fa9e9dad req-d147a062-bdc0-4bb7-9951-8e8c3593c9ee service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Received unexpected event network-vif-plugged-c5011eec-0913-4b23-b71b-534a51e35fd4 for instance with vm_state building and task_state spawning. [ 768.523189] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.556584] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943942, 'name': Rename_Task, 'duration_secs': 0.157323} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.556867] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.557128] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb3f6ba8-61a7-4db1-ba55-a4aea7c4ac70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.564458] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 768.564458] env[68437]: value = "task-2943943" [ 768.564458] env[68437]: _type = "Task" [ 768.564458] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.572737] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943943, 'name': PowerOnVM_Task} progress is 0%. 
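The network-vif-plugged handling above illustrates the external-event pattern: an incoming Neutron event is matched against waiters registered per instance and event name, and when no waiter exists (here the instance is still building/spawning), it is logged as unexpected and dropped. A rough, hypothetical sketch of that dispatch shape, not nova's implementation:

```python
# Hedged sketch of the "pop_instance_event" pattern visible above: events are
# matched against registered waiters keyed by instance UUID and event name; an
# event with no waiter is reported as unexpected. Not nova's code.
import threading
from collections import defaultdict

_waiters = defaultdict(dict)  # instance_uuid -> {event_name: threading.Event}


def prepare_for_event(instance_uuid, event_name):
    """Register interest in an event before triggering the external action."""
    ev = threading.Event()
    _waiters[instance_uuid][event_name] = ev
    return ev


def pop_instance_event(instance_uuid, event_name):
    """Deliver an external event to its waiter, or drop it as unexpected."""
    ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
        return
    ev.set()  # wake whoever is blocked in ev.wait()
```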
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.643229] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Successfully updated port: c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.078090] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943943, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.150213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.150213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.150213] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 769.320540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.539s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.324013] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.480s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.325510] env[68437]: INFO nova.compute.claims [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.576516] env[68437]: DEBUG oslo_vmware.api [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943943, 'name': PowerOnVM_Task, 'duration_secs': 0.604712} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.576786] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.577010] env[68437]: INFO nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Took 8.65 seconds to spawn the instance on the hypervisor. [ 769.577194] env[68437]: DEBUG nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.578050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319d11df-dc02-4434-8cc0-86100a62baaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.685221] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 769.892552] env[68437]: INFO nova.scheduler.client.report [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleted allocation for migration 57158733-20f2-4deb-8251-47df03d5e04b [ 769.919531] env[68437]: DEBUG nova.network.neutron [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Updating instance_info_cache with network_info: [{"id": "c5011eec-0913-4b23-b71b-534a51e35fd4", "address": "fa:16:3e:28:f2:42", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5011eec-09", "ovs_interfaceid": "c5011eec-0913-4b23-b71b-534a51e35fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.093595] env[68437]: INFO nova.compute.manager [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Took 40.30 seconds to build instance. [ 770.403924] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6638f4c-09d5-4645-b232-752df817466b tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 32.741s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.424863] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.425425] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Instance network_info: |[{"id": "c5011eec-0913-4b23-b71b-534a51e35fd4", "address": "fa:16:3e:28:f2:42", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5011eec-09", "ovs_interfaceid": "c5011eec-0913-4b23-b71b-534a51e35fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 770.427820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:f2:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '101a44fc-ffde-4e3e-ad82-363454ae458b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5011eec-0913-4b23-b71b-534a51e35fd4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.436380] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 
tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.436928] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 770.440297] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34bef4d9-0bc5-405f-b6c1-450835bcdd68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.463581] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.463581] env[68437]: value = "task-2943944" [ 770.463581] env[68437]: _type = "Task" [ 770.463581] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.474592] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943944, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.528932] env[68437]: DEBUG nova.compute.manager [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Received event network-changed-c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 770.529137] env[68437]: DEBUG nova.compute.manager [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Refreshing instance network info cache due to event network-changed-c5011eec-0913-4b23-b71b-534a51e35fd4. 
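Earlier in this sequence the cached network_info entry for port c5011eec-0913-4b23-b71b-534a51e35fd4 is reduced to the compact "Instance VIF info" dict (bridge name, MAC, opaque NSX network reference, vif model) that the VMware driver consumes. Below is a hedged sketch of that reduction, with field names taken from the log; the helper itself is hypothetical rather than the nova.virt.vmwareapi code.

```python
# Hedged sketch: a rough reduction of one cached network_info entry into the
# "Instance VIF info" shape shown above. Field names mirror the log; this is
# not the nova.virt.vmwareapi implementation.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],   # e.g. "br-int"
        "mac_address": vif["address"],              # e.g. "fa:16:3e:28:f2:42"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                      # the Neutron port UUID
        "vif_model": vif_model,
    }
```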
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 770.529383] env[68437]: DEBUG oslo_concurrency.lockutils [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] Acquiring lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.529483] env[68437]: DEBUG oslo_concurrency.lockutils [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] Acquired lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.529643] env[68437]: DEBUG nova.network.neutron [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Refreshing network info cache for port c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 770.595987] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c68f04e-9ac8-4c68-9368-c61f5eb95152 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.855s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.882416] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a12b20a-abaa-46f7-87f4-62d688845963 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.890803] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e55119b-6588-4e48-af84-15227c765c9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.921447] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5fff56-156e-4246-84dd-dc83960530c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.931965] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794206df-7d98-4c5d-b0ee-39bc8f28816e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.944016] env[68437]: DEBUG nova.compute.provider_tree [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.974326] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943944, 'name': CreateVM_Task, 'duration_secs': 0.353751} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.974326] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.974616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.974714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.975062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.975322] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a83d139-7e37-44cc-83c7-2089f00c4405 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.981421] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 770.981421] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528562ce-e289-dbba-929e-2a9538b5c965" [ 770.981421] env[68437]: _type = "Task" [ 770.981421] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.990172] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528562ce-e289-dbba-929e-2a9538b5c965, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.099651] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.233184] env[68437]: DEBUG nova.network.neutron [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Updated VIF entry in instance network info cache for port c5011eec-0913-4b23-b71b-534a51e35fd4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 771.233558] env[68437]: DEBUG nova.network.neutron [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Updating instance_info_cache with network_info: [{"id": "c5011eec-0913-4b23-b71b-534a51e35fd4", "address": "fa:16:3e:28:f2:42", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5011eec-09", "ovs_interfaceid": "c5011eec-0913-4b23-b71b-534a51e35fd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.446558] env[68437]: DEBUG nova.scheduler.client.report [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.495016] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528562ce-e289-dbba-929e-2a9538b5c965, 'name': SearchDatastore_Task, 'duration_secs': 0.012707} completed successfully. 
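The inventory dict reported above for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 determines schedulable capacity. Assuming the standard placement formula (total - reserved) * allocation_ratio, the worked numbers come out as follows:

```python
# Worked example (assumption: standard placement capacity formula
# (total - reserved) * allocation_ratio) applied to the inventory data
# reported above for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```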
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.495335] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.495568] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.495808] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.495969] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.496168] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.496431] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2894aaf8-701d-4530-aa70-ce404b55047a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.506235] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.506422] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Folder [datastore1] devstack-image-cache_base created. 
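The _fetch_image_if_missing flow above searches the per-datastore image cache (devstack-image-cache_base), creates the folder if it is absent, and later copies the cached VMDK into the instance's own folder. The datastore-path strings it logs can be rendered as in the sketch below; `ds_path` is a hypothetical helper for illustration, not nova's ds_util API.

```python
# Hedged illustration of the datastore-path strings seen above
# ("[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" and
# "[datastore1] <instance-uuid>/<instance-uuid>.vmdk"). Helper is hypothetical.
def ds_path(datastore, *parts):
    """Render a vSphere-style datastore path such as '[datastore1] a/b.vmdk'."""
    return "[%s] %s" % (datastore, "/".join(parts))


image_id = "a272f526-6b8d-4a29-bd06-cd29ab5fabbe"          # image UUID from the log
instance_uuid = "e3855111-7678-42c5-a37e-25e8587416aa"     # instance UUID from the log

cached_vmdk = ds_path("datastore1", "devstack-image-cache_base", image_id, image_id + ".vmdk")
instance_vmdk = ds_path("datastore1", instance_uuid, instance_uuid + ".vmdk")

print(cached_vmdk)    # [datastore1] devstack-image-cache_base/a272f526-.../a272f526-....vmdk
print(instance_vmdk)  # [datastore1] e3855111-.../e3855111-....vmdk
```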
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.507176] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab0d2a34-6585-411d-a824-234682a46a71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.512786] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 771.512786] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef033b-4602-c4a2-a85e-4faf64ec8733" [ 771.512786] env[68437]: _type = "Task" [ 771.512786] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.521414] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef033b-4602-c4a2-a85e-4faf64ec8733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.621540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.654487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.654780] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.736264] env[68437]: DEBUG oslo_concurrency.lockutils [req-2214fc10-efc1-4700-a025-f70dd95e6dcb req-3c6e4377-d08e-4cb3-88d9-8c62d28b5ec9 service nova] Releasing lock "refresh_cache-e3855111-7678-42c5-a37e-25e8587416aa" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.951953] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.952554] env[68437]: DEBUG nova.compute.manager [None 
req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.955311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.548s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.956744] env[68437]: INFO nova.compute.claims [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.023419] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef033b-4602-c4a2-a85e-4faf64ec8733, 'name': SearchDatastore_Task, 'duration_secs': 0.014787} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.024137] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a57fa5-cb19-4fec-a01c-4deb20ea6725 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.031111] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 772.031111] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5e5f5-0d14-eb16-196d-29520f7cedf6" [ 772.031111] env[68437]: _type = "Task" [ 772.031111] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.039447] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5e5f5-0d14-eb16-196d-29520f7cedf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.463874] env[68437]: DEBUG nova.compute.utils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.465313] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.465479] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 772.529262] env[68437]: DEBUG nova.policy [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cb5c1cd965a4825aa6c7727a5ccd481', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.543261] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5e5f5-0d14-eb16-196d-29520f7cedf6, 'name': SearchDatastore_Task, 'duration_secs': 0.028874} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.543528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.543790] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] e3855111-7678-42c5-a37e-25e8587416aa/e3855111-7678-42c5-a37e-25e8587416aa.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 772.544059] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c38c838d-fe19-4d09-b70d-beefdf8be4a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.551573] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 772.551573] env[68437]: value = "task-2943945" [ 772.551573] env[68437]: _type = "Task" [ 772.551573] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.559921] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.969173] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 773.066803] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943945, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.112634] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Successfully created port: c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.492659] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fabe67-3268-4e59-af8d-e682a3eb0ca0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.503538] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d719747-5371-46e2-b000-f57c04d38d02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.537903] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c7027c-0ce2-43cf-a882-706a5df47389 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.547421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bc060c-6221-4b69-b41a-53379a05390a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.564219] env[68437]: DEBUG nova.compute.provider_tree [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.571535] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593457} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.571873] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] e3855111-7678-42c5-a37e-25e8587416aa/e3855111-7678-42c5-a37e-25e8587416aa.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.572074] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.572302] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbdce837-8512-4dd1-906b-30ade9e94119 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.580609] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 773.580609] env[68437]: value = "task-2943946" [ 773.580609] env[68437]: _type = "Task" [ 773.580609] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.591417] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.980407] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 774.016461] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.016647] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.016956] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.017328] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.017882] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.017882] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.018327] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.018666] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.019133] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.019475] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.019844] env[68437]: DEBUG nova.virt.hardware [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.021315] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1eb8c0-c069-4764-bc1f-37c747b08c34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.035287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fc2463-a0d6-40c7-a546-70bc32663c4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.069678] env[68437]: DEBUG nova.scheduler.client.report [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.093956] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.33456} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.095350] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.096548] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d36527-b7b7-4da8-a8a7-0779aa35005a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.124558] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] e3855111-7678-42c5-a37e-25e8587416aa/e3855111-7678-42c5-a37e-25e8587416aa.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.125287] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf2cc461-bb12-4ab4-a287-326ed13efe80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.146853] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 774.146853] env[68437]: value = "task-2943947" [ 774.146853] env[68437]: _type = "Task" [ 774.146853] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.157137] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943947, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.577548] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.578328] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.582196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.630s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.584338] env[68437]: INFO nova.compute.claims [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.615794] env[68437]: DEBUG nova.compute.manager [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-vif-plugged-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 774.615900] env[68437]: DEBUG oslo_concurrency.lockutils [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] Acquiring lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.616116] env[68437]: DEBUG oslo_concurrency.lockutils [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.616294] env[68437]: DEBUG oslo_concurrency.lockutils [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.616494] env[68437]: DEBUG nova.compute.manager [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] No waiting events found dispatching network-vif-plugged-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.616673] env[68437]: WARNING nova.compute.manager [req-814cbb0f-412e-4b0c-968f-32286726b3d2 req-f222f480-ab9a-4834-a299-1e6baa9bc4a4 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received unexpected event network-vif-plugged-c09d45df-fef7-4b7f-ac2b-cea270301ba4 for instance with vm_state building and task_state spawning. [ 774.666097] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.792716] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Successfully updated port: c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 775.092429] env[68437]: DEBUG nova.compute.utils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.094040] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 775.157939] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943947, 'name': ReconfigVM_Task, 'duration_secs': 0.646855} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.158186] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Reconfigured VM instance instance-00000020 to attach disk [datastore1] e3855111-7678-42c5-a37e-25e8587416aa/e3855111-7678-42c5-a37e-25e8587416aa.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.158795] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4900ea7-2bb1-42b5-8b8b-02e6af37150e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.166625] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 775.166625] env[68437]: value = "task-2943948" [ 775.166625] env[68437]: _type = "Task" [ 775.166625] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.175196] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943948, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.302122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.302297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.302498] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 775.598357] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.678191] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943948, 'name': Rename_Task, 'duration_secs': 0.163044} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.679987] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.680959] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12158568-a22e-40f7-9fc0-8ed0dc4dd70a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.688666] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 775.688666] env[68437]: value = "task-2943949" [ 775.688666] env[68437]: _type = "Task" [ 775.688666] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.705703] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.860808] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 776.103064] env[68437]: DEBUG nova.network.neutron [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.140320] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a3390e-381d-4e76-8ec1-d699bb62f044 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.154030] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd195e9d-2397-445a-9710-33b0b955ef57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.187208] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d6dcfa-51fb-4867-8649-349b1a33a385 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.200964] env[68437]: DEBUG oslo_vmware.api [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943949, 'name': PowerOnVM_Task, 'duration_secs': 0.505776} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.201338] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.201550] env[68437]: INFO nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Took 8.40 seconds to spawn the instance on the hypervisor. [ 776.201726] env[68437]: DEBUG nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.202997] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b07f65-5afc-4e92-ab3f-9e98057f0057 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.207316] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c9875e-aa94-47c7-9c26-435dbf86a739 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.223297] env[68437]: DEBUG nova.compute.provider_tree [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.611646] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.613931] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.614231] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Instance network_info: |[{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.614854] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:45:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c09d45df-fef7-4b7f-ac2b-cea270301ba4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.622501] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Creating folder: Project (3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.622855] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99f614f7-533e-4ad9-80f2-6a434a8d0595 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.636173] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Created folder: Project (3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b) in parent group-v590848. [ 776.636511] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Creating folder: Instances. Parent ref: group-v590942. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.636862] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c1873f2-22da-46f0-bd06-042b2293002a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.642399] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.642754] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.642996] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.643342] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.643603] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
776.643857] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.644230] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.644519] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.644820] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.645145] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.645438] env[68437]: DEBUG nova.virt.hardware [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.646637] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11ffb15-2c9f-43c2-9941-31f945387ba0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.652156] env[68437]: DEBUG nova.compute.manager [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 776.652350] env[68437]: DEBUG nova.compute.manager [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing instance network info cache due to event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 776.652557] env[68437]: DEBUG oslo_concurrency.lockutils [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.652696] env[68437]: DEBUG oslo_concurrency.lockutils [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.652857] env[68437]: DEBUG nova.network.neutron [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 776.659763] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d07529-ad7d-4747-844a-2cb6d762c933 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.665279] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Created folder: Instances in parent group-v590942. [ 776.665515] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.666240] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.666737] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ade9a4a4-3980-4aa7-935a-31b1652b5616 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.688533] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.694019] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Creating folder: Project (2460a28ebd294378a8f72413b31f0e62). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.694982] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e507bee-d98f-470a-b3e5-ea23dfd79e23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.702512] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.702512] env[68437]: value = "task-2943952" [ 776.702512] env[68437]: _type = "Task" [ 776.702512] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.708356] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Created folder: Project (2460a28ebd294378a8f72413b31f0e62) in parent group-v590848. [ 776.708606] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Creating folder: Instances. Parent ref: group-v590944. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.708838] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b50b57d-ac11-4dde-b502-00d8ca157aaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.713372] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943952, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.723483] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Created folder: Instances in parent group-v590944. [ 776.723676] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.723884] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.724189] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13191410-c9a4-4613-9da5-e8ca8d6e83c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.738018] env[68437]: DEBUG nova.scheduler.client.report [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 776.751575] env[68437]: INFO nova.compute.manager [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Took 41.01 seconds to build instance. [ 776.756560] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.756560] env[68437]: value = "task-2943955" [ 776.756560] env[68437]: _type = "Task" [ 776.756560] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.767350] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943955, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.212851] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943952, 'name': CreateVM_Task, 'duration_secs': 0.390155} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.213174] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.213802] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.213974] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.214312] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.214565] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee24b50a-f627-4683-8a72-c9f7a9f737aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.223163] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 777.223163] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c607c7-379f-5890-f128-c7902b4c017b" [ 777.223163] env[68437]: _type = "Task" [ 777.223163] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.228727] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c607c7-379f-5890-f128-c7902b4c017b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.249025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.249025] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 777.249804] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.288s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.252965] env[68437]: DEBUG nova.objects.instance [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lazy-loading 'resources' on Instance uuid d7c64aa1-44f8-44f4-9fb6-463033837736 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.255632] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2120afa0-8031-40f8-89ce-287439bdc0e7 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.068s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.266912] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943955, 'name': CreateVM_Task, 'duration_secs': 0.327483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.267118] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.267719] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.688202] env[68437]: DEBUG nova.network.neutron [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updated VIF entry in instance network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 777.688584] env[68437]: DEBUG nova.network.neutron [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.730906] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c607c7-379f-5890-f128-c7902b4c017b, 'name': SearchDatastore_Task, 'duration_secs': 0.011845} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.731220] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.731459] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.732030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.732030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.732030] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.732305] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.732621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.732842] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0825612d-5cd2-4146-8b92-acd9aec815d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.734858] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88e2f4e7-e61f-431c-b43f-2cf9b47ebed0 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.741094] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 777.741094] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520531c8-6322-ad4f-1687-a9778a666674" [ 777.741094] env[68437]: _type = "Task" [ 777.741094] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.745136] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.745337] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.746345] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dc3be28-9c64-4389-b184-51c123b8f681 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.752373] env[68437]: DEBUG nova.compute.utils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 777.755875] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520531c8-6322-ad4f-1687-a9778a666674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.759019] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 777.759019] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 777.759019] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.763341] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 777.763341] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525816af-fcf1-cf83-4312-362e4d864f92" [ 777.763341] env[68437]: _type = "Task" [ 777.763341] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.773152] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525816af-fcf1-cf83-4312-362e4d864f92, 'name': SearchDatastore_Task, 'duration_secs': 0.009285} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.774663] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-594ca284-9d6a-44a7-a691-ae719fb01ba3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.785337] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 777.785337] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523c4a09-4a63-906c-1f0d-12d242bea100" [ 777.785337] env[68437]: _type = "Task" [ 777.785337] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.804624] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c4a09-4a63-906c-1f0d-12d242bea100, 'name': SearchDatastore_Task, 'duration_secs': 0.010477} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.806064] env[68437]: DEBUG nova.policy [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '227473a85c3242229bd559b521dd0023', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d7b3b9e525e494d896b8d6e874c3e8b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 777.807740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.808012] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 39c532b1-b05e-4354-ad8f-9223b06e9488/39c532b1-b05e-4354-ad8f-9223b06e9488.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.810828] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b61a07b-3419-401e-ba32-d90f00b11c9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.820411] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 777.820411] env[68437]: value = "task-2943956" [ 777.820411] env[68437]: _type = "Task" [ 777.820411] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.829587] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943956, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.191187] env[68437]: DEBUG oslo_concurrency.lockutils [req-08efe765-7d9d-4fd3-9efc-921d275adb0a req-1c2348f9-b79e-4292-bb52-9d97934b1d15 service nova] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.254753] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520531c8-6322-ad4f-1687-a9778a666674, 'name': SearchDatastore_Task, 'duration_secs': 0.010706} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.258593] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.258901] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.259184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.259340] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.259595] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.260206] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 778.264157] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bf721d5-3f04-47f0-89df-b1c4bd4324db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.280809] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Successfully created port: 00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.283991] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.284275] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.285289] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b678b552-c419-4991-8118-ee09f52cbb2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.292264] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 778.292264] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e90dde-0527-f85a-b788-f775ca0ba2fd" [ 778.292264] env[68437]: _type = "Task" [ 778.292264] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.301468] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e90dde-0527-f85a-b788-f775ca0ba2fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.302278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.329062] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eea2f4-092f-4cbc-b1d9-e07c2b8f233e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.332799] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469828} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.333413] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 39c532b1-b05e-4354-ad8f-9223b06e9488/39c532b1-b05e-4354-ad8f-9223b06e9488.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.333632] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.333872] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a820653-cd87-4817-abea-818f0347e9d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.339094] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793eecda-63a5-40ed-bdd9-2d11b39f3b5a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.343143] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 778.343143] env[68437]: value = "task-2943957" [ 778.343143] env[68437]: _type = "Task" [ 778.343143] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.373216] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30102650-3a0e-465b-bb7a-89d260214765 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.378875] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943957, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.384432] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94cb2e84-9bb1-4b0e-bccb-8912638650fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.398695] env[68437]: DEBUG nova.compute.provider_tree [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.803329] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e90dde-0527-f85a-b788-f775ca0ba2fd, 'name': SearchDatastore_Task, 'duration_secs': 0.011816} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.804176] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b2899d6-7819-48ea-886b-f71584b74f1b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.812090] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 778.812090] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526d54fe-0ac7-4423-7b05-66d65796be6c" [ 778.812090] env[68437]: _type = "Task" [ 778.812090] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.820030] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526d54fe-0ac7-4423-7b05-66d65796be6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.853302] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943957, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.209199} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.853593] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.854412] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034f2050-e9a5-4326-b078-4a66c5349581 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.876877] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 39c532b1-b05e-4354-ad8f-9223b06e9488/39c532b1-b05e-4354-ad8f-9223b06e9488.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.877461] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cd218a2-5c30-411d-875d-2f627a50e31e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.898583] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 778.898583] env[68437]: value = "task-2943958" [ 778.898583] env[68437]: _type = "Task" [ 778.898583] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.902488] env[68437]: DEBUG nova.scheduler.client.report [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.911355] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943958, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.280385] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 779.306849] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 779.307127] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.307295] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 779.307895] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.307990] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 779.308225] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 779.308531] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 779.309828] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 779.309828] env[68437]: DEBUG nova.virt.hardware [None 
req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 779.309961] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 779.310730] env[68437]: DEBUG nova.virt.hardware [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 779.311289] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c3a11c-4bc0-4112-ae01-42c083b24740 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.326839] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a027b13-c922-404b-aa1f-907a062feea5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.331754] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526d54fe-0ac7-4423-7b05-66d65796be6c, 'name': SearchDatastore_Task, 'duration_secs': 0.010698} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.332414] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.332908] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 779.333387] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1721d0e9-0f2b-4d5c-bc1b-2af2ff974d0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.347386] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 779.347386] env[68437]: value = "task-2943959" [ 779.347386] env[68437]: _type = "Task" [ 779.347386] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.357411] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943959, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.410076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.412130] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943958, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.412632] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.749s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.412830] env[68437]: DEBUG nova.objects.instance [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 779.441586] env[68437]: INFO nova.scheduler.client.report [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted allocations for instance d7c64aa1-44f8-44f4-9fb6-463033837736 [ 779.861390] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481885} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.861738] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.862021] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.862309] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-469a97b6-0e36-4b9c-9deb-c6bf8f78b55b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.870480] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 779.870480] env[68437]: value = "task-2943960" [ 779.870480] env[68437]: _type = "Task" [ 779.870480] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.879843] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943960, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.914581] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.934450] env[68437]: DEBUG nova.compute.manager [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received event network-vif-plugged-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.934450] env[68437]: DEBUG oslo_concurrency.lockutils [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] Acquiring lock "efed858a-44b9-45b7-8778-22183549088c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.934450] env[68437]: DEBUG oslo_concurrency.lockutils [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] Lock "efed858a-44b9-45b7-8778-22183549088c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.934450] env[68437]: DEBUG oslo_concurrency.lockutils [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] Lock "efed858a-44b9-45b7-8778-22183549088c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.934450] env[68437]: DEBUG nova.compute.manager [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] No waiting events found dispatching network-vif-plugged-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.934698] env[68437]: WARNING nova.compute.manager [req-57af6856-5b23-4cbe-8239-3d42c6adcafd req-86f17520-1b65-483a-b6a9-b515a984a5bd service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received unexpected event network-vif-plugged-00b2c6d2-93a6-4590-8a40-ae6de710ee7e for instance with vm_state building and task_state spawning. 
[ 779.948827] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5491eff-a0dc-4e92-b974-7ab2c0dac53b tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "d7c64aa1-44f8-44f4-9fb6-463033837736" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.468s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.044606] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Successfully updated port: 00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 780.240027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.240027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.240027] env[68437]: DEBUG nova.compute.manager [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.240027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0d501a-146f-4383-a3e6-a95407fb0d6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.247710] env[68437]: DEBUG nova.compute.manager [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 780.250091] env[68437]: DEBUG nova.objects.instance [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lazy-loading 'flavor' on Instance uuid 9a7c248f-5262-4f03-aace-f22c4976bb0f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 780.381056] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071942} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.381371] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.382208] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39159ecb-828e-4083-8c58-57cf648a87c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.406101] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.406869] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-592073a8-192c-4247-8cec-ae670c9497c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.430840] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8d8bff35-8a6e-4fdc-bcdf-79dbde011e00 tempest-ServersAdmin275Test-499409367 tempest-ServersAdmin275Test-499409367-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.431935] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.051s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.433667] env[68437]: INFO nova.compute.claims [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.444849] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943958, 'name': ReconfigVM_Task, 'duration_secs': 1.454396} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.446745] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 39c532b1-b05e-4354-ad8f-9223b06e9488/39c532b1-b05e-4354-ad8f-9223b06e9488.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.447716] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 780.447716] env[68437]: value = "task-2943961" [ 780.447716] env[68437]: _type = "Task" [ 780.447716] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.447941] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8c1c32f-5dc4-4c4f-a9f2-7526817d5de5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.459382] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 780.459382] env[68437]: value = "task-2943962" [ 780.459382] env[68437]: _type = "Task" [ 780.459382] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.463166] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943961, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.473788] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943962, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.548635] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.548829] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.548988] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 780.968449] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943961, 'name': ReconfigVM_Task, 'duration_secs': 0.291304} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.973021] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.973686] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c293d7e2-5626-455e-9d3e-aa7653c33ad4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.982196] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943962, 'name': Rename_Task, 'duration_secs': 0.286891} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.983544] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.983870] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 780.983870] env[68437]: value = "task-2943963" [ 780.983870] env[68437]: _type = "Task" [ 780.983870] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.984086] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-337c4a4b-5c60-40d0-9771-3361c6a098d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.993372] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 780.993372] env[68437]: value = "task-2943964" [ 780.993372] env[68437]: _type = "Task" [ 780.993372] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.997991] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943963, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.084378] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 781.255222] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.255580] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c5fd504-f359-4c79-b8a8-9b8cc00ce4bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.265599] env[68437]: DEBUG oslo_vmware.api [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 781.265599] env[68437]: value = "task-2943965" [ 781.265599] env[68437]: _type = "Task" [ 781.265599] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.276337] env[68437]: DEBUG oslo_vmware.api [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943965, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.294443] env[68437]: DEBUG nova.network.neutron [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.495567] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943963, 'name': Rename_Task, 'duration_secs': 0.153997} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.495859] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.496125] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1653981d-80e5-4eb2-ac55-0af1c70bdd63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.503334] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 781.503334] env[68437]: value = "task-2943966" [ 781.503334] env[68437]: _type = "Task" [ 781.503334] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.507030] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943964, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.514616] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943966, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.688369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.688647] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.776797] env[68437]: DEBUG oslo_vmware.api [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943965, 'name': PowerOffVM_Task, 'duration_secs': 0.270227} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.777121] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.777362] env[68437]: DEBUG nova.compute.manager [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.778231] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d92cef2-cd2e-44ff-8feb-cf2ce32a7bd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.797498] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.798231] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Instance network_info: |[{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 781.800923] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:3e:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '863474bc-a24a-4823-828c-580a187829e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00b2c6d2-93a6-4590-8a40-ae6de710ee7e', 
'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 781.808503] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 781.809208] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efed858a-44b9-45b7-8778-22183549088c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 781.809292] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90688a1e-84dc-4934-a2bc-252f7f3c8949 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.834844] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 781.834844] env[68437]: value = "task-2943967" [ 781.834844] env[68437]: _type = "Task" [ 781.834844] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.846506] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943967, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.964135] env[68437]: DEBUG nova.compute.manager [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 781.964135] env[68437]: DEBUG nova.compute.manager [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing instance network info cache due to event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e. 
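The "Instance VIF info" entry logged by build_virtual_machine just above is easier to read restated as a Python literal; every value below is copied verbatim from that log line, so this is only a re-presentation, not an independent definition of the structure:

    vif_info = [{
        'network_name': 'br-int',
        'mac_address': 'fa:16:3e:1a:3e:19',
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': '863474bc-a24a-4823-828c-580a187829e3',
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': '00b2c6d2-93a6-4590-8a40-ae6de710ee7e',
        'vif_model': 'vmxnet3',
    }]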
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 781.964352] env[68437]: DEBUG oslo_concurrency.lockutils [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] Acquiring lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.964518] env[68437]: DEBUG oslo_concurrency.lockutils [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] Acquired lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.964733] env[68437]: DEBUG nova.network.neutron [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 782.008324] env[68437]: DEBUG oslo_vmware.api [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2943964, 'name': PowerOnVM_Task, 'duration_secs': 0.801912} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.013673] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.013888] env[68437]: INFO nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Took 8.03 seconds to spawn the instance on the hypervisor. [ 782.014095] env[68437]: DEBUG nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.015283] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7b2f29-be89-4a17-aae3-f1b9e5354321 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.032102] env[68437]: DEBUG oslo_vmware.api [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943966, 'name': PowerOnVM_Task, 'duration_secs': 0.488944} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.032816] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.033120] env[68437]: INFO nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Took 5.42 seconds to spawn the instance on the hypervisor. [ 782.033359] env[68437]: DEBUG nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.034192] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a9b88f-b805-46eb-afac-cab523e7ef64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.158789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6e36b4-fdb0-4f40-ac91-4302c679ffd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.170720] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c095f5-f3f3-4397-aa47-5e7da3a61124 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.204551] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c51f4c8-6123-405b-8a75-c99c2d7e6ce3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.214096] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed80852-d8ae-4a76-9c40-7c07a48bef62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.228128] env[68437]: DEBUG nova.compute.provider_tree [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.292989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2066e78a-6599-4970-8042-587d638edebd tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.348047] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943967, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.541530] env[68437]: INFO nova.compute.manager [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Took 40.72 seconds to build instance. [ 782.558261] env[68437]: INFO nova.compute.manager [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Took 39.17 seconds to build instance. [ 782.733996] env[68437]: DEBUG nova.scheduler.client.report [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 782.778362] env[68437]: DEBUG nova.network.neutron [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updated VIF entry in instance network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 782.778773] env[68437]: DEBUG nova.network.neutron [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.785672] env[68437]: DEBUG nova.objects.instance [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lazy-loading 
'flavor' on Instance uuid 9a7c248f-5262-4f03-aace-f22c4976bb0f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 782.847743] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943967, 'name': CreateVM_Task, 'duration_secs': 0.654343} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.847841] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: efed858a-44b9-45b7-8778-22183549088c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 782.848650] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.848750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.850060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 782.850060] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c8f3c83-b7e4-4a60-a9b8-9c57b0ef7cf9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.855719] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 782.855719] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522f13d9-1f74-7279-13a0-f5aa9c1447a2" [ 782.855719] env[68437]: _type = "Task" [ 782.855719] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.865143] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522f13d9-1f74-7279-13a0-f5aa9c1447a2, 'name': SearchDatastore_Task} progress is 0%. 
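The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines around the image-cache check above, and the "acquired by ... :: waited" / "released by ... :: held" lines elsewhere in this trace, are the two oslo.concurrency locking forms with debug logging enabled. A small sketch of both; the function names and bodies are illustrative only:

    from oslo_concurrency import lockutils

    # Context-manager form: emits the plain "Acquiring/Acquired/Releasing
    # lock" messages (lockutils.py:313/316/334 in the paths above).
    def refresh_image_cache_entry(image_id):
        with lockutils.lock('[datastore1] devstack-image-cache_base/%s' % image_id):
            pass  # critical section: check or populate the cached VMDK

    # Decorator form: emits the "acquired by ... :: waited" and
    # "released by ... :: held" messages (lockutils.py:405/410/424 above).
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section guarded by the shared "compute_resources" lock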
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.042985] env[68437]: DEBUG oslo_concurrency.lockutils [None req-df74e38b-3aee-4bd6-bda3-f6593f28a016 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.923s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.063862] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5488222c-14b5-41c4-9eab-1022127e2324 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.105s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.242833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.810s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.243219] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 783.247175] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.593s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.248676] env[68437]: INFO nova.compute.claims [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.281474] env[68437]: DEBUG oslo_concurrency.lockutils [req-47223bba-5dfa-43d5-b6c0-e202f199f9c1 req-bb489003-ec5e-44a7-afa5-1bb8713e3092 service nova] Releasing lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.294866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.294866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquired lock 
"refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.294866] env[68437]: DEBUG nova.network.neutron [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 783.294866] env[68437]: DEBUG nova.objects.instance [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lazy-loading 'info_cache' on Instance uuid 9a7c248f-5262-4f03-aace-f22c4976bb0f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.366931] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522f13d9-1f74-7279-13a0-f5aa9c1447a2, 'name': SearchDatastore_Task, 'duration_secs': 0.011264} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.367266] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.367501] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.367734] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.367880] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.368072] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.368346] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4c8266cd-f046-45b3-a22f-16129963dbdb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.377585] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.377704] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.378430] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf53419d-fdfe-4f73-8305-63cf31178307 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.387155] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 783.387155] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521d78fa-a755-1e84-6120-69f43809a2b3" [ 783.387155] env[68437]: _type = "Task" [ 783.387155] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.395410] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521d78fa-a755-1e84-6120-69f43809a2b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.545806] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.567831] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.753298] env[68437]: DEBUG nova.compute.utils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 783.756709] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 783.756868] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 783.800526] env[68437]: DEBUG nova.objects.base [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Object Instance<9a7c248f-5262-4f03-aace-f22c4976bb0f> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 783.813460] env[68437]: DEBUG nova.policy [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1074dd1b444e45beadcccfe6671c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1c3ca0e78f472e8c127fa68ed610f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 783.901742] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521d78fa-a755-1e84-6120-69f43809a2b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009978} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.902708] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88ec5abb-a5b8-479a-b8c8-9fa2c75e8a50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.909905] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 783.909905] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52136e6f-5f83-b4d5-f3ae-ba55fd9e2490" [ 783.909905] env[68437]: _type = "Task" [ 783.909905] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.918876] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52136e6f-5f83-b4d5-f3ae-ba55fd9e2490, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.067418] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.090190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.252657] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Successfully created port: 4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.257212] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 784.376950] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.377329] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.426755] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52136e6f-5f83-b4d5-f3ae-ba55fd9e2490, 'name': SearchDatastore_Task, 'duration_secs': 0.009572} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.429215] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.429556] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] efed858a-44b9-45b7-8778-22183549088c/efed858a-44b9-45b7-8778-22183549088c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.429846] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-814737fe-ade2-4dbf-b886-b6b3f6617992 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.441323] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 784.441323] env[68437]: value = "task-2943968" [ 784.441323] env[68437]: _type = "Task" [ 784.441323] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.453582] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943968, 'name': CopyVirtualDisk_Task} progress is 0%. 
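The CopyVirtualDisk_Task submitted above (and the ExtendVirtualDisk_Task that follows it later in this trace) prepare the instance's root disk from the cached image. A hedged sketch of that two-step sequence using the same session/task pattern; session, disk_mgr (the vCenter VirtualDiskManager reference), dc_ref, the datastore paths, and size_kb are placeholders, not values from this log:

    def prepare_root_disk(session, disk_mgr, dc_ref, cached_vmdk, instance_vmdk, size_kb):
        # Copy the cached VMDK into the instance directory.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                                  destName=instance_vmdk)
        session.wait_for_task(task)
        # Grow the copied root disk to the flavor's size (logged above as
        # "Extending root virtual disk to 1048576", i.e. a value in KB).
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                  name=instance_vmdk, datacenter=dc_ref,
                                  newCapacityKb=size_kb, eagerZero=False)
        session.wait_for_task(task)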
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.477758] env[68437]: INFO nova.compute.manager [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Rebuilding instance [ 784.540771] env[68437]: DEBUG nova.compute.manager [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 784.541700] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9753193-0776-4c67-9324-325e39835715 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.576121] env[68437]: DEBUG nova.network.neutron [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updating instance_info_cache with network_info: [{"id": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "address": "fa:16:3e:5f:f5:32", "network": {"id": "1697bd03-1f3a-434e-9653-88bf378a1c38", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-471846936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fb35616c64449f9a7b85f2d7e7d3c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "101a44fc-ffde-4e3e-ad82-363454ae458b", "external-id": "nsx-vlan-transportzone-723", "segmentation_id": 723, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped35f15a-aa", "ovs_interfaceid": "ed35f15a-aaef-467f-9f0a-437e412e5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.838147] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9479d0-ce60-4b51-83f4-dce29c52a5ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.847827] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a31018-c27d-471b-9d08-df8654d1929a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.883413] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1775c6-ee87-4167-9150-37ada3d16d34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.892363] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35432258-ba58-4f09-a793-869aa9b1e2a7 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.907463] env[68437]: DEBUG nova.compute.provider_tree [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 784.952717] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943968, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469151} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.952996] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] efed858a-44b9-45b7-8778-22183549088c/efed858a-44b9-45b7-8778-22183549088c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.953233] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.953495] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1724b04-5c23-4ced-a598-9083312e1e8e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.962994] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 784.962994] env[68437]: value = "task-2943969" [ 784.962994] env[68437]: _type = "Task" [ 784.962994] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.973715] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943969, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.075886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Releasing lock "refresh_cache-9a7c248f-5262-4f03-aace-f22c4976bb0f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.273847] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 785.295540] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 785.295793] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.295958] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.296243] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.296431] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.296498] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 785.296700] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 
tempest-ImagesTestJSON-413382143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 785.296820] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 785.297052] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 785.297171] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 785.297627] env[68437]: DEBUG nova.virt.hardware [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 785.298564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ec5e06-bae2-4fe1-aa41-2a27f049986c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.307428] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd71f1-e9fd-46d4-a33a-42fee5255dbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.410560] env[68437]: DEBUG nova.scheduler.client.report [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.474541] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068475} completed successfully. 
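The inventory reported above for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 determines schedulable capacity via the usual Placement formula, (total - reserved) * allocation_ratio. A small sketch reproducing those numbers with the values copied from the log entry:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0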
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.474822] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 785.475646] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b70c2ce-c15b-4ee7-ab34-c72512e1a7f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.498483] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] efed858a-44b9-45b7-8778-22183549088c/efed858a-44b9-45b7-8778-22183549088c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 785.499134] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-914c1608-c742-4f5b-a015-92bc70717250 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.522125] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 785.522125] env[68437]: value = "task-2943970" [ 785.522125] env[68437]: _type = "Task" [ 785.522125] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.530970] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.558802] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.559049] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31e34f3d-e475-40eb-8d40-a97127179e80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.567948] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 785.567948] env[68437]: value = "task-2943971" [ 785.567948] env[68437]: _type = "Task" [ 785.567948] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.579066] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.915597] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.916179] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.919133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.378s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.920429] env[68437]: INFO nova.compute.claims [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.034245] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943970, 'name': ReconfigVM_Task, 'duration_secs': 0.379823} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.034496] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Reconfigured VM instance instance-00000023 to attach disk [datastore1] efed858a-44b9-45b7-8778-22183549088c/efed858a-44b9-45b7-8778-22183549088c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.035138] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20290fe9-c04a-41c9-9b98-7967bb420d67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.042254] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 786.042254] env[68437]: value = "task-2943972" [ 786.042254] env[68437]: _type = "Task" [ 786.042254] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.052198] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943972, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.079538] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943971, 'name': PowerOffVM_Task, 'duration_secs': 0.130773} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.079846] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 786.080524] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.081347] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956e17f9-fa51-447b-86b2-5feea52a61b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.084408] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.084755] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b3ac000-5750-4084-8d87-3b6a0b2c2807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.091525] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 786.092802] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6dbf753c-89fb-4bc8-a9b9-503bedf47f33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.094457] env[68437]: DEBUG oslo_vmware.api [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 786.094457] env[68437]: value = "task-2943973" [ 786.094457] env[68437]: _type = "Task" [ 786.094457] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.102511] env[68437]: DEBUG oslo_vmware.api [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943973, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.123674] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 786.123919] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 786.124213] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Deleting the datastore file [datastore1] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.124493] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0d23716-6319-491b-9411-52d687c1fbc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.133015] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 786.133015] env[68437]: value = "task-2943975" [ 786.133015] env[68437]: _type = "Task" [ 786.133015] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.142325] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943975, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.219236] env[68437]: DEBUG nova.compute.manager [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Received event network-vif-plugged-4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 786.219454] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] Acquiring lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.219654] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.219816] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.219979] env[68437]: DEBUG nova.compute.manager [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] No waiting events found dispatching network-vif-plugged-4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.220418] env[68437]: WARNING nova.compute.manager [req-d2a5a78e-7ae5-4208-b565-b9a79a76924f req-9c5e28cb-af1d-4d6c-a66e-175c465246d1 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Received unexpected event network-vif-plugged-4d42d18c-3f02-433f-886e-903fef8696e7 for instance with vm_state building and task_state spawning. [ 786.318064] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Successfully updated port: 4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.425240] env[68437]: DEBUG nova.compute.utils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.429418] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.429418] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 786.470274] env[68437]: DEBUG nova.policy [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8f30aea8b2b4ea1a6eb7d30875a4c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '188b948736f44dfa8dd9aeb258180c58', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.553543] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943972, 'name': Rename_Task, 'duration_secs': 0.163668} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.553914] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.554260] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89c1d8b3-118f-4114-aea3-0d4441f9f1a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.562848] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 786.562848] env[68437]: value = "task-2943976" [ 786.562848] env[68437]: _type = "Task" [ 786.562848] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.572332] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943976, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.606389] env[68437]: DEBUG oslo_vmware.api [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943973, 'name': PowerOnVM_Task, 'duration_secs': 0.429152} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.606773] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.607047] env[68437]: DEBUG nova.compute.manager [None req-bbd3420e-e9fa-45d4-a4f8-428e705c7ccc tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 786.608009] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d7caca-92b4-4978-8593-4cdb3ff338ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.643562] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145036} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.643834] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.644500] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 786.644798] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.820031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.820138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.820294] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Building network info cache for instance {{(pid=68437) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 786.904984] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Successfully created port: c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.929933] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 787.075805] env[68437]: DEBUG oslo_vmware.api [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943976, 'name': PowerOnVM_Task, 'duration_secs': 0.50321} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.076535] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 787.076817] env[68437]: INFO nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Took 7.80 seconds to spawn the instance on the hypervisor. [ 787.077024] env[68437]: DEBUG nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 787.081472] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ec225e-cfc5-47e8-a799-149765c69e5a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.363387] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 787.502168] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380cfd0c-11d9-49da-92c7-21cae21689b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.510718] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18087fa-2a07-4e32-a2ac-1c78c1f02201 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.546042] env[68437]: DEBUG nova.network.neutron [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Updating instance_info_cache with network_info: [{"id": "4d42d18c-3f02-433f-886e-903fef8696e7", "address": "fa:16:3e:44:84:10", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d42d18c-3f", "ovs_interfaceid": "4d42d18c-3f02-433f-886e-903fef8696e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.547962] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efbb5c8-179c-407a-a35b-3b5037140691 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.556929] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e4caef-8f78-4694-83ff-e227c1786bcb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.574016] env[68437]: DEBUG nova.compute.provider_tree [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.601160] env[68437]: INFO nova.compute.manager [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Took 39.67 seconds to build instance. 
[ 787.687407] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.687653] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.687846] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.688052] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.688208] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.688359] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.688574] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.688733] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.688901] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 
tempest-ServersListShow298Test-1548744480-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.689079] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.689252] env[68437]: DEBUG nova.virt.hardware [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.690134] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab37abca-1fd4-4d8a-a5f4-a5df62a83eca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.699218] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4ba910-2a8b-4f17-9594-29909b636e15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.713530] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.722022] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.722022] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 787.722022] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62e32842-6337-4d9a-8c03-1c7e5d25c853 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.738235] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.738235] env[68437]: value = "task-2943977" [ 787.738235] env[68437]: _type = "Task" [ 787.738235] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.746804] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943977, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.946267] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.976126] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.976450] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.976649] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.976894] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.977135] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.977453] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.977617] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.977798] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.978023] env[68437]: DEBUG nova.virt.hardware [None 
req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.978254] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.978494] env[68437]: DEBUG nova.virt.hardware [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.979586] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e74a74-c1ed-4bdc-8c3a-662effa19157 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.995066] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5982a587-d53f-4eb8-bad1-697a3e58e687 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.051844] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.052215] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Instance network_info: |[{"id": "4d42d18c-3f02-433f-886e-903fef8696e7", "address": "fa:16:3e:44:84:10", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d42d18c-3f", "ovs_interfaceid": "4d42d18c-3f02-433f-886e-903fef8696e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.052685] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 
tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:84:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d42d18c-3f02-433f-886e-903fef8696e7', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.061325] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.061645] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.061900] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06e96e9c-c0da-44c5-ac9d-6572fb7a844f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.078579] env[68437]: DEBUG nova.scheduler.client.report [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 788.090497] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.090497] env[68437]: value = "task-2943978" [ 788.090497] env[68437]: _type = "Task" [ 788.090497] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.097995] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943978, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.105632] env[68437]: DEBUG oslo_concurrency.lockutils [None req-14f87b03-2d80-48e1-a264-265d3e579871 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.557s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.250122] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943977, 'name': CreateVM_Task, 'duration_secs': 0.291297} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.250332] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.250833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.251062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.251382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.252117] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-641c2b39-67fd-4b2b-a60e-bb63f49343c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.258598] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 788.258598] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52631716-322e-85b5-cfb4-ec6fa85d4d5f" [ 788.258598] env[68437]: _type = "Task" [ 788.258598] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.270523] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52631716-322e-85b5-cfb4-ec6fa85d4d5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.588272] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.588272] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 788.589886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.951s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.590154] env[68437]: DEBUG nova.objects.instance [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lazy-loading 'resources' on Instance uuid 67312d87-cc63-4dc7-b9c1-9c8d349a4756 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.604585] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943978, 'name': CreateVM_Task, 'duration_secs': 0.371661} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.604753] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.605591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.609043] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.659853] env[68437]: DEBUG nova.compute.manager [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Received event network-changed-4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 788.660045] env[68437]: DEBUG nova.compute.manager [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Refreshing instance network info cache due to event network-changed-4d42d18c-3f02-433f-886e-903fef8696e7. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 788.660260] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Acquiring lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.662454] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Acquired lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.662454] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Refreshing network info cache for port 4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 788.695192] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Successfully updated port: c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.776658] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52631716-322e-85b5-cfb4-ec6fa85d4d5f, 'name': SearchDatastore_Task, 'duration_secs': 0.011302} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.776658] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.776658] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.776658] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.776993] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.776993] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.776993] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.776993] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.777241] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11a8fa28-5006-4974-886f-643e8fe2957a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.779355] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-254f3121-ade5-4765-bd65-a1e2a8eb2bc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.785806] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e 
tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 788.785806] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52980a3b-2e63-1232-b48d-127a95f21b94" [ 788.785806] env[68437]: _type = "Task" [ 788.785806] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.794593] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52980a3b-2e63-1232-b48d-127a95f21b94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.795917] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.796030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.796690] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64354b6f-ed57-48bb-8b2c-a4ccdf0f70ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.803326] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 788.803326] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52afac4a-cd81-7dc6-cac5-36b9bd485c00" [ 788.803326] env[68437]: _type = "Task" [ 788.803326] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.814280] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52afac4a-cd81-7dc6-cac5-36b9bd485c00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.093528] env[68437]: DEBUG nova.compute.utils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 789.095324] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 789.095596] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 789.127830] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.177952] env[68437]: DEBUG nova.policy [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0e66fd345044e92857d742c65f537ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36ec823128647758ca8047a5ebf1ae1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 789.199376] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.199750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.200091] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 789.304378] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52980a3b-2e63-1232-b48d-127a95f21b94, 'name': SearchDatastore_Task, 'duration_secs': 0.028176} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.312206] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.312583] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.312878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.323905] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52afac4a-cd81-7dc6-cac5-36b9bd485c00, 'name': SearchDatastore_Task, 'duration_secs': 0.020284} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.324987] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc810814-0d97-45b0-ad91-5c2e6e0ff6e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.333977] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 789.333977] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ddf4d9-bbb9-3ec5-9ed7-8c4c9a48fde9" [ 789.333977] env[68437]: _type = "Task" [ 789.333977] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.340318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "efed858a-44b9-45b7-8778-22183549088c" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.341040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.341040] env[68437]: INFO nova.compute.manager [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Rebooting instance [ 789.350527] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ddf4d9-bbb9-3ec5-9ed7-8c4c9a48fde9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.435547] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Updated VIF entry in instance network info cache for port 4d42d18c-3f02-433f-886e-903fef8696e7. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 789.435916] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Updating instance_info_cache with network_info: [{"id": "4d42d18c-3f02-433f-886e-903fef8696e7", "address": "fa:16:3e:44:84:10", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d42d18c-3f", "ovs_interfaceid": "4d42d18c-3f02-433f-886e-903fef8696e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.598869] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 789.679564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fa8e3b-0db0-487b-a7a6-09bc5bede1ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.687818] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc88caaa-1527-499c-b9e4-058f433b9e26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.726706] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Successfully created port: 361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.729170] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfab1b2a-3f8e-4155-bb19-4c7bdbb1ef26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.738580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5325e672-93b0-42a2-ad81-6ecb04812d32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.754713] env[68437]: DEBUG nova.compute.provider_tree [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.761231] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 789.845497] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ddf4d9-bbb9-3ec5-9ed7-8c4c9a48fde9, 'name': SearchDatastore_Task, 'duration_secs': 0.017712} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.845961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.846169] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.846467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.846660] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.847620] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c2f9fe6-3ce8-42fb-8551-a030c9a39310 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.849928] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cda85f7f-93a9-497f-8029-017bc647d895 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.860273] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 789.860273] env[68437]: value = "task-2943979" [ 789.860273] env[68437]: _type = "Task" [ 789.860273] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.865624] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.866399] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.867303] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca4a460c-f318-4fcc-97ad-08d10ccb12af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.876309] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943979, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.881691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.881995] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquired lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.882593] env[68437]: DEBUG nova.network.neutron [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 789.886805] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 789.886805] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e94c44-e798-9c86-1517-e8ae35f04e98" [ 789.886805] env[68437]: _type = "Task" [ 789.886805] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.898364] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e94c44-e798-9c86-1517-e8ae35f04e98, 'name': SearchDatastore_Task} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.899568] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27e0a3a3-0f89-450e-8359-a96b5ba61172 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.907203] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 789.907203] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526c361c-f1cf-83b0-b468-4be2ce36e48f" [ 789.907203] env[68437]: _type = "Task" [ 789.907203] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.916965] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526c361c-f1cf-83b0-b468-4be2ce36e48f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.917972] env[68437]: DEBUG nova.network.neutron [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Updating instance_info_cache with network_info: [{"id": "c0002143-a475-44e6-afd5-c6389c790504", "address": "fa:16:3e:a7:7e:ad", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0002143-a4", "ovs_interfaceid": "c0002143-a475-44e6-afd5-c6389c790504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.940534] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Releasing lock "refresh_cache-6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.940804] env[68437]: DEBUG nova.compute.manager [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 789.940981] env[68437]: DEBUG nova.compute.manager [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing instance network info cache due to event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 789.941165] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Acquiring lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.264115] env[68437]: DEBUG nova.scheduler.client.report [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.371482] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943979, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.420932] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.421279] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Instance network_info: |[{"id": "c0002143-a475-44e6-afd5-c6389c790504", "address": "fa:16:3e:a7:7e:ad", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0002143-a4", "ovs_interfaceid": "c0002143-a475-44e6-afd5-c6389c790504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 790.421586] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e 
tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526c361c-f1cf-83b0-b468-4be2ce36e48f, 'name': SearchDatastore_Task, 'duration_secs': 0.013589} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.421931] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:7e:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0002143-a475-44e6-afd5-c6389c790504', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.430873] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 790.431117] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.431391] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77/6b10ff9f-3248-46fe-9cd4-19e0ebbcee77.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.435025] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.435025] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d28020d1-bc41-48f3-a304-396d8a4a4531 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.436594] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0647d43-ae6e-461e-97c2-b25bf9462d0c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.459900] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 790.459900] env[68437]: value = "task-2943980" [ 790.459900] env[68437]: _type = "Task" [ 790.459900] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.461521] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.461521] env[68437]: value = "task-2943981" [ 790.461521] env[68437]: _type = "Task" [ 790.461521] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.477563] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.477792] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943981, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.615509] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 790.644927] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 790.645201] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 790.645365] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 790.645591] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.645752] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 
tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 790.645903] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 790.646129] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 790.646294] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 790.646470] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 790.646636] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 790.646809] env[68437]: DEBUG nova.virt.hardware [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 790.647739] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7905d096-d32d-4059-8fb7-211687bd698a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.657742] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33594d08-e358-46d9-9ec6-c6664df64bf5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.662715] env[68437]: DEBUG nova.network.neutron [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.688813] env[68437]: DEBUG nova.compute.manager [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Received event network-vif-plugged-c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 790.689130] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Acquiring lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.689272] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.689452] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.689778] env[68437]: DEBUG nova.compute.manager [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] No waiting events found dispatching network-vif-plugged-c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.690052] env[68437]: WARNING nova.compute.manager [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Received unexpected event network-vif-plugged-c0002143-a475-44e6-afd5-c6389c790504 for instance with vm_state building and task_state spawning. 
[ 790.690052] env[68437]: DEBUG nova.compute.manager [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Received event network-changed-c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 790.690449] env[68437]: DEBUG nova.compute.manager [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Refreshing instance network info cache due to event network-changed-c0002143-a475-44e6-afd5-c6389c790504. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 790.690673] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Acquiring lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.690815] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Acquired lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.691041] env[68437]: DEBUG nova.network.neutron [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Refreshing network info cache for port c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 790.769078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.178s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.771046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.108s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.771349] env[68437]: DEBUG nova.objects.instance [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lazy-loading 'resources' on Instance uuid 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 790.794685] env[68437]: INFO nova.scheduler.client.report [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted allocations for instance 67312d87-cc63-4dc7-b9c1-9c8d349a4756 [ 790.872466] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943979, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.895274} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.872755] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.872969] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.873251] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c00d60e-e881-4c7b-9fd8-42b6abd09318 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.881582] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 790.881582] env[68437]: value = "task-2943982" [ 790.881582] env[68437]: _type = "Task" [ 790.881582] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.909171] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943982, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.977214] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.979966] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943981, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.165719] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Releasing lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.167067] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Acquired lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.167378] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 791.311729] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5cc4862f-b3b0-4482-952f-d900697231b0 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "67312d87-cc63-4dc7-b9c1-9c8d349a4756" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.223s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.317734] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "e3855111-7678-42c5-a37e-25e8587416aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.318870] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.318870] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "e3855111-7678-42c5-a37e-25e8587416aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.319040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.319268] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.322925] env[68437]: INFO nova.compute.manager [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Terminating instance [ 791.399445] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.27358} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.404812] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 791.405084] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450fda9d-a31d-405d-b07b-c64832135c09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.411023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.411023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.437015] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.443333] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff8c6898-4328-4e66-be3f-59f124302a8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.468813] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 
tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 791.468813] env[68437]: value = "task-2943983" [ 791.468813] env[68437]: _type = "Task" [ 791.468813] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.482839] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943980, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.487717] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943981, 'name': CreateVM_Task, 'duration_secs': 0.725343} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.492480] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.492480] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.495233] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.495440] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.495980] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.496861] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41a044da-d5aa-49cb-9ba0-5e3b4c63caa4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.503123] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 791.503123] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52283d7f-16d8-9765-3c26-ee32a8aae77f" [ 791.503123] env[68437]: _type = "Task" [ 791.503123] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.514852] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52283d7f-16d8-9765-3c26-ee32a8aae77f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.515050] env[68437]: DEBUG nova.network.neutron [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Updated VIF entry in instance network info cache for port c0002143-a475-44e6-afd5-c6389c790504. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 791.515320] env[68437]: DEBUG nova.network.neutron [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Updating instance_info_cache with network_info: [{"id": "c0002143-a475-44e6-afd5-c6389c790504", "address": "fa:16:3e:a7:7e:ad", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0002143-a4", "ovs_interfaceid": "c0002143-a475-44e6-afd5-c6389c790504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.636983] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Successfully updated port: 361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 791.670780] env[68437]: DEBUG nova.compute.manager [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.671724] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128dbda0-0f66-482c-aba2-5d26379f670a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.827295] env[68437]: DEBUG nova.compute.manager [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 
tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 791.827605] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 791.828422] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663658ce-af2c-4438-8208-a0db0234c014 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.839552] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.839552] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14bf1cc3-eae4-4854-b86e-10a28759ea33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.847359] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 791.847359] env[68437]: value = "task-2943984" [ 791.847359] env[68437]: _type = "Task" [ 791.847359] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.856233] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943984, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.870520] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2341c1-e73c-4baa-b3a4-d75c9533c977 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.878874] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cf3ee3-6c36-4901-a532-51f89e3c0abb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.914245] env[68437]: DEBUG nova.compute.utils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 791.916238] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4f5993-327e-4875-83cc-4e2a50d6600c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.927183] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e29de3f-1401-41de-bcde-affef44ddac7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.930357] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updated VIF entry in instance network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 791.930621] env[68437]: DEBUG nova.network.neutron [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.944809] env[68437]: DEBUG nova.compute.provider_tree [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.973638] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943980, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.088354} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.976933] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77/6b10ff9f-3248-46fe-9cd4-19e0ebbcee77.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.978149] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.978149] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd95f892-999a-4446-838c-b40494e4f4c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.986065] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.987497] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 791.987497] env[68437]: value = "task-2943985" [ 791.987497] env[68437]: _type = "Task" [ 791.987497] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.999085] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943985, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.014885] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52283d7f-16d8-9765-3c26-ee32a8aae77f, 'name': SearchDatastore_Task, 'duration_secs': 0.039525} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.015211] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.015445] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.015855] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.016089] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.016395] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.016634] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8660933-3fa3-4d24-86bf-eaa9d40b7868 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.018841] env[68437]: DEBUG oslo_concurrency.lockutils [req-f5c2fbdf-767c-4f14-b670-4cfd161efcd8 req-dbe4f48b-a39e-47eb-ab18-8ebfaaf247be service nova] Releasing lock "refresh_cache-013a92cc-0fc2-4e85-aee6-efb62bae4dcb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.029911] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.030129] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.030915] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc105937-371a-43d8-8c0e-f92bc34665d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.038949] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 792.038949] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ccbd6f-259f-679c-3a59-7853be175782" [ 792.038949] env[68437]: _type = "Task" [ 792.038949] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.048629] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ccbd6f-259f-679c-3a59-7853be175782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.140280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.140430] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.140552] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 792.357486] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943984, 'name': PowerOffVM_Task, 'duration_secs': 0.240421} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.357829] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.357938] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 792.358215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-372d3251-0b84-4337-90c3-ece92ac36818 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.419975] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.429261] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 792.429517] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 792.429682] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleting the datastore file [datastore1] e3855111-7678-42c5-a37e-25e8587416aa {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.429952] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98915b43-0ac4-4ad8-aee1-068e436c50de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.433733] env[68437]: DEBUG oslo_concurrency.lockutils [req-603880c5-c895-4605-83eb-936a446bdb8e req-d995bad6-15ad-4635-985c-7979b4082c65 service nova] Releasing lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.437765] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 792.437765] env[68437]: value = 
"task-2943987" [ 792.437765] env[68437]: _type = "Task" [ 792.437765] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.446545] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943987, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.447406] env[68437]: DEBUG nova.scheduler.client.report [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.483704] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.496344] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943985, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080524} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.496626] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.497434] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73de5cd-cb72-4bb6-967c-a450661b8f08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.520537] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77/6b10ff9f-3248-46fe-9cd4-19e0ebbcee77.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.520845] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bf77b5c-1c38-47cb-b1e3-4c330c8be7eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.544017] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 792.544017] env[68437]: value = "task-2943988" [ 792.544017] env[68437]: _type = "Task" [ 792.544017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.551297] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ccbd6f-259f-679c-3a59-7853be175782, 'name': SearchDatastore_Task, 'duration_secs': 0.022575} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.552476] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2fde4c5-c30c-4b76-bde6-f8319deaffc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.557411] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943988, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.561293] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 792.561293] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b1682a-9447-854f-0f38-4a1ef88a3ff3" [ 792.561293] env[68437]: _type = "Task" [ 792.561293] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.570287] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b1682a-9447-854f-0f38-4a1ef88a3ff3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.681508] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 792.691464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee33453-37b8-4a23-ab63-50a954031a6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.700184] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Doing hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 792.700534] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1aaff1d9-fb99-41c9-bb43-924fb6ab0f52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.710393] env[68437]: DEBUG oslo_vmware.api [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 792.710393] env[68437]: value = "task-2943989" [ 792.710393] env[68437]: _type = "Task" [ 792.710393] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.719739] env[68437]: DEBUG oslo_vmware.api [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943989, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.865737] env[68437]: DEBUG nova.network.neutron [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Updating instance_info_cache with network_info: [{"id": "361c162e-f032-4355-aa03-a7b16b7ad181", "address": "fa:16:3e:e6:4e:bd", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap361c162e-f0", "ovs_interfaceid": "361c162e-f032-4355-aa03-a7b16b7ad181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.949676] env[68437]: DEBUG oslo_vmware.api [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2943987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402731} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.950066] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.950401] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.950597] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.951188] env[68437]: INFO nova.compute.manager [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 792.951496] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.952582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.181s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.954539] env[68437]: DEBUG nova.compute.manager [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.954735] env[68437]: DEBUG nova.network.neutron [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 792.956940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.040s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.957341] env[68437]: DEBUG nova.objects.instance [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lazy-loading 'resources' on Instance uuid 5435b4d8-46c3-43e3-b11b-cbeb580e2f36 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.990036] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943983, 'name': ReconfigVM_Task, 'duration_secs': 1.2813} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.990290] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb/5202b708-179c-48d2-9c4e-2bb5ab1a6ebb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.990918] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbd426e4-439c-493c-89f0-1ca002021b95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.999952] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 792.999952] env[68437]: value = "task-2943990" [ 792.999952] env[68437]: _type = "Task" [ 792.999952] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.000982] env[68437]: INFO nova.scheduler.client.report [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted allocations for instance 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530 [ 793.020611] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943990, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.060449] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943988, 'name': ReconfigVM_Task, 'duration_secs': 0.409628} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.060922] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Reconfigured VM instance instance-00000024 to attach disk [datastore2] 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77/6b10ff9f-3248-46fe-9cd4-19e0ebbcee77.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.062121] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-084b964a-a5b8-468d-9226-99bb2274269e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.076020] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 793.076020] env[68437]: value = "task-2943991" [ 793.076020] env[68437]: _type = "Task" [ 793.076020] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.078930] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b1682a-9447-854f-0f38-4a1ef88a3ff3, 'name': SearchDatastore_Task, 'duration_secs': 0.018193} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.082180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.082463] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 013a92cc-0fc2-4e85-aee6-efb62bae4dcb/013a92cc-0fc2-4e85-aee6-efb62bae4dcb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 793.083058] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9884e0a-3c29-4c90-866c-ce246adc048f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.093158] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943991, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.094659] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 793.094659] env[68437]: value = "task-2943992" [ 793.094659] env[68437]: _type = "Task" [ 793.094659] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.105684] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943992, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.223058] env[68437]: DEBUG oslo_vmware.api [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2943989, 'name': ResetVM_Task, 'duration_secs': 0.11845} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.223362] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Did hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 793.223672] env[68437]: DEBUG nova.compute.manager [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.224517] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c91787-bd3a-46eb-b3b8-8150dc1b1a91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.230031] env[68437]: DEBUG nova.compute.manager [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Received event network-vif-plugged-361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 793.230360] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Acquiring lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.230441] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.230610] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.230780] env[68437]: DEBUG nova.compute.manager [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] No waiting events found dispatching network-vif-plugged-361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 793.230941] env[68437]: WARNING nova.compute.manager [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Received unexpected event network-vif-plugged-361c162e-f032-4355-aa03-a7b16b7ad181 for instance with vm_state building and task_state spawning. 
[ 793.231108] env[68437]: DEBUG nova.compute.manager [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Received event network-changed-361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 793.231262] env[68437]: DEBUG nova.compute.manager [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Refreshing instance network info cache due to event network-changed-361c162e-f032-4355-aa03-a7b16b7ad181. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 793.231427] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Acquiring lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.369294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.369541] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance network_info: |[{"id": "361c162e-f032-4355-aa03-a7b16b7ad181", "address": "fa:16:3e:e6:4e:bd", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap361c162e-f0", "ovs_interfaceid": "361c162e-f032-4355-aa03-a7b16b7ad181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 793.370437] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Acquired lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.370437] env[68437]: DEBUG nova.network.neutron [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Refreshing network info 
cache for port 361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 793.371856] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:4e:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '361c162e-f032-4355-aa03-a7b16b7ad181', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.381951] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 793.387611] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.389543] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f435fb55-47b1-4f63-b17d-434d3bff33a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.416032] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.416032] env[68437]: value = "task-2943993" [ 793.416032] env[68437]: _type = "Task" [ 793.416032] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.424759] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943993, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.517927] env[68437]: DEBUG oslo_concurrency.lockutils [None req-606975b6-39fd-42ae-bcd4-de768d5e8cd2 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.291s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.526320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.526320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.526320] env[68437]: INFO nova.compute.manager [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Attaching volume 97cb9764-cf92-4eda-944b-9e7123615f4c to /dev/sdb [ 793.527142] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943990, 'name': Rename_Task, 'duration_secs': 0.219227} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.528029] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.528481] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f1235d0-4fe2-4d05-b0b5-b38200fca09e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.545504] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 793.545504] env[68437]: value = "task-2943994" [ 793.545504] env[68437]: _type = "Task" [ 793.545504] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.561336] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943994, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.573095] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5f4c7c-8f5a-4f60-8888-f2889fca6969 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.595957] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50e3c97-0b53-4eab-93cd-a794f3c31903 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.598685] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943991, 'name': Rename_Task, 'duration_secs': 0.217574} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.602911] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.606101] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2a30ac5-0c4e-4b11-bd5e-4d0417bf84f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.615882] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943992, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.624073] env[68437]: DEBUG nova.virt.block_device [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating existing volume attachment record: 82596026-86f4-4382-8f6c-2dbc2317dcfc {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 793.626333] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 793.626333] env[68437]: value = "task-2943995" [ 793.626333] env[68437]: _type = "Task" [ 793.626333] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.639768] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943995, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.741938] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bd75529-661a-4a6f-add4-286e0fa2fadd tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.401s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.849118] env[68437]: DEBUG nova.network.neutron [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.855930] env[68437]: DEBUG nova.network.neutron [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Updated VIF entry in instance network info cache for port 361c162e-f032-4355-aa03-a7b16b7ad181. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 793.855930] env[68437]: DEBUG nova.network.neutron [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Updating instance_info_cache with network_info: [{"id": "361c162e-f032-4355-aa03-a7b16b7ad181", "address": "fa:16:3e:e6:4e:bd", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap361c162e-f0", "ovs_interfaceid": "361c162e-f032-4355-aa03-a7b16b7ad181", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.926924] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943993, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.056252] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943994, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.085688] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7839b15f-fd39-4ab9-89e0-d6e70666b1c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.094750] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad9503e-60e6-4862-ab84-8e8d194de287 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.128284] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37181d14-6490-48ea-9543-5fbb24ad6aaa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.134499] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943992, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544388} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.138143] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 013a92cc-0fc2-4e85-aee6-efb62bae4dcb/013a92cc-0fc2-4e85-aee6-efb62bae4dcb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.138394] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.138624] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-410a358b-4339-4f45-96e1-b1e17bcc1270 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.143889] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff38330-97c3-479a-bee8-db139c5b40fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.151264] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943995, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.156528] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 794.156528] env[68437]: value = "task-2943999" [ 794.156528] env[68437]: _type = "Task" [ 794.156528] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.167873] env[68437]: DEBUG nova.compute.provider_tree [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.179957] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943999, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.356845] env[68437]: INFO nova.compute.manager [-] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Took 1.40 seconds to deallocate network for instance. [ 794.358998] env[68437]: DEBUG oslo_concurrency.lockutils [req-b7cb4057-9119-418e-a17f-634918eaee19 req-3ede981a-21f6-4f75-a241-72394b7d5938 service nova] Releasing lock "refresh_cache-860107df-4e9b-44b1-9e85-b0ee3a827268" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.427987] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2943993, 'name': CreateVM_Task, 'duration_secs': 0.597853} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.428266] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.428870] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.429008] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.429342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 794.429574] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61220dc5-1c93-42b5-a500-c5851d54dcfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.435186] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 
794.435186] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52633fa3-6eda-1414-9f3d-4122e0794851" [ 794.435186] env[68437]: _type = "Task" [ 794.435186] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.443665] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52633fa3-6eda-1414-9f3d-4122e0794851, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.560139] env[68437]: DEBUG oslo_vmware.api [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2943994, 'name': PowerOnVM_Task, 'duration_secs': 0.817613} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.560750] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.561236] env[68437]: DEBUG nova.compute.manager [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.562602] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7501bff-8164-45af-a993-ccd9ded8e79f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.645944] env[68437]: DEBUG oslo_vmware.api [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2943995, 'name': PowerOnVM_Task, 'duration_secs': 0.591793} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.646341] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.646630] env[68437]: INFO nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Took 9.37 seconds to spawn the instance on the hypervisor. 
[ 794.646975] env[68437]: DEBUG nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.648228] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014902d2-0ed0-4bb5-a838-c6f0b1abd0dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.666964] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2943999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143955} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.668027] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.668214] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c350cd8-5a6e-4463-8cec-715a923ea52f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.671529] env[68437]: DEBUG nova.scheduler.client.report [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.699488] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 013a92cc-0fc2-4e85-aee6-efb62bae4dcb/013a92cc-0fc2-4e85-aee6-efb62bae4dcb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.701631] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e457e3f-776d-4462-bca2-abdeba055613 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.724328] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 794.724328] env[68437]: value = "task-2944000" [ 794.724328] env[68437]: _type = "Task" [ 794.724328] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.735986] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944000, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.862484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.862747] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.866146] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.897338] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.898026] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.948159] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52633fa3-6eda-1414-9f3d-4122e0794851, 'name': SearchDatastore_Task, 'duration_secs': 0.012672} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.948499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.948791] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 794.949102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.949407] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.949612] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 794.949947] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5191c47c-6c74-4c4d-9cfc-c0f9683528d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.960997] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.961297] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.962639] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c922123-1f75-4803-b900-dc9c920f4e47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.969454] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 794.969454] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521b39a0-98a9-9181-7424-4a7d7bc43f2e" [ 794.969454] env[68437]: _type = "Task" [ 794.969454] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.978847] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b39a0-98a9-9181-7424-4a7d7bc43f2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.085641] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.171840] env[68437]: INFO nova.compute.manager [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Took 40.82 seconds to build instance. 
[ 795.177053] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.220s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.179369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.046s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.179606] env[68437]: DEBUG nova.objects.instance [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lazy-loading 'resources' on Instance uuid aa39767e-1ae7-4881-b0a8-e7b66e1ceed2 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.200664] env[68437]: INFO nova.scheduler.client.report [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Deleted allocations for instance 5435b4d8-46c3-43e3-b11b-cbeb580e2f36 [ 795.235325] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944000, 'name': ReconfigVM_Task, 'duration_secs': 0.355003} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.235648] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 013a92cc-0fc2-4e85-aee6-efb62bae4dcb/013a92cc-0fc2-4e85-aee6-efb62bae4dcb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.236322] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d86b01d0-f177-4ec4-af1a-78a9e023f536 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.246334] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 795.246334] env[68437]: value = "task-2944001" [ 795.246334] env[68437]: _type = "Task" [ 795.246334] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.255551] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944001, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.263585] env[68437]: DEBUG nova.compute.manager [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Received event network-vif-deleted-c5011eec-0913-4b23-b71b-534a51e35fd4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 795.263903] env[68437]: DEBUG nova.compute.manager [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 795.264099] env[68437]: DEBUG nova.compute.manager [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing instance network info cache due to event network-changed-00b2c6d2-93a6-4590-8a40-ae6de710ee7e. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 795.264332] env[68437]: DEBUG oslo_concurrency.lockutils [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] Acquiring lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.264475] env[68437]: DEBUG oslo_concurrency.lockutils [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] Acquired lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.264633] env[68437]: DEBUG nova.network.neutron [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Refreshing network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 795.481131] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b39a0-98a9-9181-7424-4a7d7bc43f2e, 'name': SearchDatastore_Task, 'duration_secs': 0.011877} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.481954] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b848530-7519-409e-8c13-98c9a41f08f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.488206] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 795.488206] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1f58c-d00d-813e-cc9f-9e3b8c79d6bc" [ 795.488206] env[68437]: _type = "Task" [ 795.488206] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.498870] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1f58c-d00d-813e-cc9f-9e3b8c79d6bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.672416] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "efed858a-44b9-45b7-8778-22183549088c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.672705] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.672919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "efed858a-44b9-45b7-8778-22183549088c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.673116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.673285] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.675033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-106ee1c1-7043-4530-8ec1-33f2e5f70f8e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.356s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.675475] env[68437]: INFO nova.compute.manager [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 
efed858a-44b9-45b7-8778-22183549088c] Terminating instance [ 795.710435] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0ca722a0-c020-48ba-bb89-28afbf7d36af tempest-ServersAdmin275Test-2055587085 tempest-ServersAdmin275Test-2055587085-project-member] Lock "5435b4d8-46c3-43e3-b11b-cbeb580e2f36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.086s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.759185] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944001, 'name': Rename_Task, 'duration_secs': 0.223944} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.759469] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.759724] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63d23f48-f812-4f56-b2e0-d93e5073d006 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.769066] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 795.769066] env[68437]: value = "task-2944002" [ 795.769066] env[68437]: _type = "Task" [ 795.769066] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.780194] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944002, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.847229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.847528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.848457] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.848457] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.848457] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.851695] env[68437]: INFO nova.compute.manager [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Terminating instance [ 796.002398] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1f58c-d00d-813e-cc9f-9e3b8c79d6bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011997} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.002398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.002841] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.002841] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cea75b77-195f-4405-abf3-05f665b11fcf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.009875] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 796.009875] env[68437]: value = "task-2944004" [ 796.009875] env[68437]: _type = "Task" [ 796.009875] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.020340] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.060114] env[68437]: DEBUG nova.network.neutron [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updated VIF entry in instance network info cache for port 00b2c6d2-93a6-4590-8a40-ae6de710ee7e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 796.060510] env[68437]: DEBUG nova.network.neutron [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [{"id": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "address": "fa:16:3e:1a:3e:19", "network": {"id": "9fe87b75-cc45-43c1-a256-02579ca41b44", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1273311211-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b3b9e525e494d896b8d6e874c3e8b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00b2c6d2-93", "ovs_interfaceid": "00b2c6d2-93a6-4590-8a40-ae6de710ee7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.106020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c285968-a78f-4253-8dd6-7c3fa2f2df8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.113031] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Suspending the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 796.113031] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-68443640-7dde-445a-abe6-836aa1d19cac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.122108] env[68437]: DEBUG oslo_vmware.api [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 796.122108] env[68437]: value = "task-2944005" [ 796.122108] env[68437]: _type = "Task" [ 796.122108] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.134178] env[68437]: DEBUG oslo_vmware.api [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944005, 'name': SuspendVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.183020] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.186560] env[68437]: DEBUG nova.compute.manager [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.186985] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.197172] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080adfd5-ae77-4632-a096-ca11310da9a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.214054] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.214054] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0662ba65-ba98-41b9-abf5-3feb8196460c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.222188] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 796.222188] env[68437]: value = "task-2944006" [ 796.222188] env[68437]: _type = "Task" [ 796.222188] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.237366] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944006, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.249380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dfdf1e-2004-4ca0-99c0-c5b3393de85b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.260870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba00991a-dd66-4625-bf55-123949c3ec1b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.300373] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee61218-d1af-4db5-991b-2ed60eb5eae3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.311780] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.313411] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dac6e7-3ea4-4260-965d-0fa3f7d1add7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.332209] env[68437]: DEBUG nova.compute.provider_tree [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.356475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "refresh_cache-5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.356767] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquired lock "refresh_cache-5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.356985] env[68437]: DEBUG nova.network.neutron [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 796.528028] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482894} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.528028] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.528436] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.528436] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-161040f6-257f-410e-becf-429bc4f3ff5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.535502] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 796.535502] env[68437]: value = "task-2944007" [ 796.535502] env[68437]: _type = "Task" [ 796.535502] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.544626] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.563632] env[68437]: DEBUG oslo_concurrency.lockutils [req-f4dffe78-c9c8-467e-a437-45c4423cf0ff req-7f0d533a-1900-472e-8251-a989f36fd2c6 service nova] Releasing lock "refresh_cache-efed858a-44b9-45b7-8778-22183549088c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.633608] env[68437]: DEBUG oslo_vmware.api [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944005, 'name': SuspendVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.707817] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.731842] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944006, 'name': PowerOffVM_Task, 'duration_secs': 0.315588} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.732153] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.732372] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.732650] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5d5cac6-5ece-4d31-b11d-454567e65002 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.804470] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.804741] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.804935] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleting the datastore file [datastore1] efed858a-44b9-45b7-8778-22183549088c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.808947] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d7b7d02-15b2-4ac6-aef0-dbf8edaf575c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.811379] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944002, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.816947] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 796.816947] env[68437]: value = "task-2944009" [ 796.816947] env[68437]: _type = "Task" [ 796.816947] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.824711] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944009, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.833876] env[68437]: DEBUG nova.scheduler.client.report [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 796.878828] env[68437]: DEBUG nova.network.neutron [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 796.942449] env[68437]: DEBUG nova.network.neutron [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.046017] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.337877} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.046017] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.047047] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80cdc29-6043-4ee4-aa27-4a36750ac1e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.070176] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.070503] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f578f77a-4f0d-4ee6-a4a7-49c56f74f05b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.091675] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 797.091675] env[68437]: value = "task-2944010" [ 797.091675] env[68437]: _type = "Task" [ 797.091675] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.101527] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944010, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.133928] env[68437]: DEBUG oslo_vmware.api [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944005, 'name': SuspendVM_Task} progress is 62%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.307443] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944002, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.327783] env[68437]: DEBUG oslo_vmware.api [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944009, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.409862} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.327783] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.327783] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.327783] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.327783] env[68437]: INFO nova.compute.manager [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: efed858a-44b9-45b7-8778-22183549088c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 797.328304] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.328304] env[68437]: DEBUG nova.compute.manager [-] [instance: efed858a-44b9-45b7-8778-22183549088c] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.328304] env[68437]: DEBUG nova.network.neutron [-] [instance: efed858a-44b9-45b7-8778-22183549088c] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 797.339971] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.161s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.344148] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.161s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.347052] env[68437]: INFO nova.compute.claims [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 797.370509] env[68437]: INFO nova.scheduler.client.report [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Deleted allocations for instance aa39767e-1ae7-4881-b0a8-e7b66e1ceed2 [ 797.446329] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Releasing lock "refresh_cache-5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.446794] env[68437]: DEBUG nova.compute.manager [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 797.446950] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 797.448178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea4a4f0-cae6-4571-af59-cc6ad4901d18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.457677] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 797.457926] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a762577b-346e-4edf-a461-63f757ac3b63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.465223] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 797.465223] env[68437]: value = "task-2944011" [ 797.465223] env[68437]: _type = "Task" [ 797.465223] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.473545] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2944011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.601399] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944010, 'name': ReconfigVM_Task, 'duration_secs': 0.466217} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.601701] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.602414] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e712f72d-2f29-408f-b32a-420ce221aa10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.609808] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 797.609808] env[68437]: value = "task-2944012" [ 797.609808] env[68437]: _type = "Task" [ 797.609808] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.618872] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944012, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.633900] env[68437]: DEBUG oslo_vmware.api [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944005, 'name': SuspendVM_Task, 'duration_secs': 1.222869} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.634299] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Suspended the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 797.634504] env[68437]: DEBUG nova.compute.manager [None req-603cc0d0-1fd9-4848-b9ee-799db0cf828a tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.637966] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2becb2cd-78d5-4009-bba1-2fb4874afc6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.805923] env[68437]: DEBUG oslo_vmware.api [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944002, 'name': PowerOnVM_Task, 'duration_secs': 1.782124} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.806217] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.806421] env[68437]: INFO nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Took 9.86 seconds to spawn the instance on the hypervisor. [ 797.806599] env[68437]: DEBUG nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.807391] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fd1a55-2ba3-4986-8181-45ec124c6439 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.841163] env[68437]: DEBUG nova.compute.manager [req-4f5f0528-fde8-4cc6-ae3f-87a34f327e1f req-1443c69b-424c-48bf-a1cc-1ff03d02afc6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Received event network-vif-deleted-00b2c6d2-93a6-4590-8a40-ae6de710ee7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 797.841163] env[68437]: INFO nova.compute.manager [req-4f5f0528-fde8-4cc6-ae3f-87a34f327e1f req-1443c69b-424c-48bf-a1cc-1ff03d02afc6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Neutron deleted interface 00b2c6d2-93a6-4590-8a40-ae6de710ee7e; detaching it from the instance and deleting it from the info cache [ 797.841163] env[68437]: DEBUG nova.network.neutron [req-4f5f0528-fde8-4cc6-ae3f-87a34f327e1f req-1443c69b-424c-48bf-a1cc-1ff03d02afc6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.879328] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cc6482fd-c758-498a-8e67-054bf1c78db7 tempest-DeleteServersAdminTestJSON-1195156319 tempest-DeleteServersAdminTestJSON-1195156319-project-member] Lock "aa39767e-1ae7-4881-b0a8-e7b66e1ceed2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.302s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.980944] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2944011, 'name': PowerOffVM_Task, 'duration_secs': 0.168925} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.980944] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.980944] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.980944] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffe490bc-014b-4251-a348-b747240a325f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.017287] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 798.017553] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 798.017685] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Deleting the datastore file [datastore2] 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 798.017941] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-424643a7-24e2-4ca6-a6ab-d5323e450b6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.024560] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for the task: (returnval){ [ 798.024560] env[68437]: value = "task-2944014" [ 798.024560] env[68437]: _type = "Task" [ 798.024560] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.033279] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2944014, 'name': DeleteDatastoreFile_Task} progress is 0%. 
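Annotation: each vCenter call in the destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) follows the same oslo.vmware pattern: invoke the SOAP method, receive a Task managed object, then poll it until completion. A minimal sketch of that cycle with the public oslo.vmware API (the session parameters and vm_ref are placeholders, not values from this log):

from oslo_vmware import api

def power_off(session, vm_ref):
    """Issue PowerOffVM_Task and block until vCenter reports the task done."""
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls TaskInfo (the "progress is N%" entries above) and
    # raises if the task ends in an error state.
    return session.wait_for_task(task)

# Example session setup (placeholder credentials):
# session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)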
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.118386] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944012, 'name': Rename_Task, 'duration_secs': 0.144879} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.118765] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.118879] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68d487bc-6535-4a75-b60d-a1219af827b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.125308] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 798.125308] env[68437]: value = "task-2944015" [ 798.125308] env[68437]: _type = "Task" [ 798.125308] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.133872] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.192296] env[68437]: DEBUG nova.network.neutron [-] [instance: efed858a-44b9-45b7-8778-22183549088c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.196086] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 798.196293] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590955', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'name': 'volume-97cb9764-cf92-4eda-944b-9e7123615f4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea330078-a8f2-41f4-a161-5d0e29ddfab5', 'attached_at': '', 'detached_at': '', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'serial': '97cb9764-cf92-4eda-944b-9e7123615f4c'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 798.197121] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a163d71-00b6-401b-8391-ddd4013f0c07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.214683] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b160de5-4b3c-4571-a553-324c09b72c31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.247107] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] volume-97cb9764-cf92-4eda-944b-9e7123615f4c/volume-97cb9764-cf92-4eda-944b-9e7123615f4c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.247636] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49ddf820-7e26-48f9-ada0-2b9658761404 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.272174] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Waiting for the task: (returnval){ [ 798.272174] env[68437]: value = "task-2944016" [ 798.272174] env[68437]: _type = "Task" [ 798.272174] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.280791] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944016, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.326591] env[68437]: INFO nova.compute.manager [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Took 41.70 seconds to build instance. 
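Annotation: the _attach_volume_vmdk entry above prints the Cinder connection_info that the attach path consumes. A trimmed reconstruction of its shape with a small illustrative accessor (the helper is not Nova code; the field values are copied from the log entry):

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-590955',   # vCenter managed object id carried in the connection info
        'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c',
        'name': 'volume-97cb9764-cf92-4eda-944b-9e7123615f4c',
        'access_mode': 'rw',
        'encrypted': False,
    },
}

def vmdk_attach_summary(info):
    """Return (volume_id, backing reference, access mode) for a 'vmdk' attachment."""
    data = info['data']
    return data['volume_id'], data['volume'], data['access_mode']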
[ 798.344605] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-107d5ee0-b020-4d3b-9622-afbe46b2af2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.354547] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5831d78f-72da-4bc3-bfcb-19034560a534 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.396797] env[68437]: DEBUG nova.compute.manager [req-4f5f0528-fde8-4cc6-ae3f-87a34f327e1f req-1443c69b-424c-48bf-a1cc-1ff03d02afc6 service nova] [instance: efed858a-44b9-45b7-8778-22183549088c] Detach interface failed, port_id=00b2c6d2-93a6-4590-8a40-ae6de710ee7e, reason: Instance efed858a-44b9-45b7-8778-22183549088c could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 798.536652] env[68437]: DEBUG oslo_vmware.api [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Task: {'id': task-2944014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104108} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.539626] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 798.540040] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 798.540298] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 798.540486] env[68437]: INFO nova.compute.manager [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Took 1.09 seconds to destroy the instance on the hypervisor. [ 798.540728] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 798.542108] env[68437]: DEBUG nova.compute.manager [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 798.542108] env[68437]: DEBUG nova.network.neutron [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 798.588429] env[68437]: DEBUG nova.network.neutron [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 798.639339] env[68437]: DEBUG oslo_vmware.api [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944015, 'name': PowerOnVM_Task, 'duration_secs': 0.511735} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.641999] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.642281] env[68437]: INFO nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Took 8.03 seconds to spawn the instance on the hypervisor. [ 798.642512] env[68437]: DEBUG nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.643789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3171688-bb74-4ff4-9fb5-0589020db391 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.694602] env[68437]: INFO nova.compute.manager [-] [instance: efed858a-44b9-45b7-8778-22183549088c] Took 1.37 seconds to deallocate network for instance. [ 798.782822] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944016, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.832774] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2c9214eb-4aec-49d8-8366-6ea59bf4d3cb tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.849s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.918467] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4e68ad-0519-4332-b892-6744864abe3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.929059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf34eea-c91e-411c-bf57-7fe15d76f86d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.981871] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020f1724-c761-4007-abdf-ee314ef89932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.992720] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896b2d27-35cf-41e7-9db4-7e0e444ec93b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.011618] env[68437]: DEBUG nova.compute.provider_tree [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.090834] env[68437]: DEBUG nova.network.neutron [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.161748] env[68437]: INFO nova.compute.manager [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Took 41.63 seconds to build instance. [ 799.206908] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.283914] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944016, 'name': ReconfigVM_Task, 'duration_secs': 0.714908} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.284615] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfigured VM instance instance-00000009 to attach disk [datastore1] volume-97cb9764-cf92-4eda-944b-9e7123615f4c/volume-97cb9764-cf92-4eda-944b-9e7123615f4c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.290701] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b15d38e-22d9-46c9-9379-746a97fbba53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.311035] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Waiting for the task: (returnval){ [ 799.311035] env[68437]: value = "task-2944017" [ 799.311035] env[68437]: _type = "Task" [ 799.311035] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.320439] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.337072] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 799.483133] env[68437]: DEBUG nova.compute.manager [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.484094] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5d8225-5388-474a-97f9-2af488d3b655 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.513951] env[68437]: DEBUG nova.scheduler.client.report [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.594857] env[68437]: INFO nova.compute.manager [-] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Took 1.05 seconds to deallocate network for instance. [ 799.601584] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.601828] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.665745] env[68437]: DEBUG oslo_concurrency.lockutils [None req-897af057-95c8-434c-8214-e1382e5970b1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.722s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.826238] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944017, 'name': ReconfigVM_Task} progress is 99%. 
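Annotation: the inventory report above is what this compute node publishes to Placement. Placement derives the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, which for the figures in the log works out as below (a small illustrative calculation, not code from Nova):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

capacity = {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inventory.items()}
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}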
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.869863] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.998194] env[68437]: INFO nova.compute.manager [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] instance snapshotting [ 799.998396] env[68437]: WARNING nova.compute.manager [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 800.002366] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f55db7-5aed-4002-a7c7-5d86f924d2f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.032527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.033084] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 800.041024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 36.795s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.041024] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9820588d-6895-4fe5-a1cc-cfaffb9c9565 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.104901] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.170969] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.325103] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944017, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.550453] env[68437]: DEBUG nova.compute.utils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 800.563099] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 800.563449] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 800.566268] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 800.566409] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-82de5a75-184f-4e2f-be74-6620e5949ca2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.576626] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 800.576626] env[68437]: value = "task-2944018" [ 800.576626] env[68437]: _type = "Task" [ 800.576626] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.585797] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944018, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.629676] env[68437]: DEBUG nova.policy [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '349e5043a5234f348e51dd8d430a043f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0003f48b0864b2c9e6a37fa0515f577', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 800.707344] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.824361] env[68437]: DEBUG oslo_vmware.api [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944017, 'name': ReconfigVM_Task, 'duration_secs': 1.154134} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.824839] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590955', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'name': 'volume-97cb9764-cf92-4eda-944b-9e7123615f4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea330078-a8f2-41f4-a161-5d0e29ddfab5', 'attached_at': '', 'detached_at': '', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'serial': '97cb9764-cf92-4eda-944b-9e7123615f4c'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 800.898125] env[68437]: INFO nova.compute.manager [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Rebuilding instance [ 800.942878] env[68437]: DEBUG nova.compute.manager [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.943773] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83ffb71-070a-4b2e-b2ce-f2338878d5a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.065102] env[68437]: DEBUG nova.compute.manager [None 
req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 801.092569] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944018, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.108251] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 0484ccee-f003-4101-87c5-fed92f095d2d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 801.108561] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 45595615-59c0-4c59-b18c-b49a3126dbb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.108621] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 27c18765-38cf-41d6-9139-9acffa94fbe6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.108692] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ea330078-a8f2-41f4-a161-5d0e29ddfab5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.108839] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 19dde8dd-eae6-41a0-b147-c505db1cda15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.108958] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 2f368262-0825-4ccc-9b1e-523b705bcfce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109105] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cf394b0b-cb14-4ae1-81bb-622c951bfdab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109232] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cf691a81-60e3-40ed-ba80-8f481ff2554b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109344] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance d5db3112-88c7-43af-a434-b91ca69f8559 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109455] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 07d98c5c-ede8-4001-93b2-1b1d83687ca1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109575] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ad773afa-fc0a-4380-901d-af013ce55f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109691] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance c74569b8-dfc9-4a74-9d25-74b484bd9477 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109799] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 1186da93-57aa-40f4-8aae-702d039844d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.109935] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance b92efa60-ef18-4578-b00d-6a2438e7eacf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.110702] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 9a7c248f-5262-4f03-aace-f22c4976bb0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.110933] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 56cfa7f3-12ad-42d0-a27f-ab8136a903ee is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 801.111091] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance f1230046-d368-40ee-b1fa-99df4ab15a10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.111234] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e3855111-7678-42c5-a37e-25e8587416aa is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 801.111357] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 39c532b1-b05e-4354-ad8f-9223b06e9488 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.111485] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 801.111611] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance efed858a-44b9-45b7-8778-22183549088c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 801.111728] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.111884] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 013a92cc-0fc2-4e85-aee6-efb62bae4dcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.112049] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 860107df-4e9b-44b1-9e85-b0ee3a827268 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.112247] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 995a3eae-c025-4efa-b509-0bf678bb0388 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 801.205182] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Successfully created port: b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.590697] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944018, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.616258] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance c5af19d6-5534-45e6-8c9c-dacf30d4fb1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 801.894020] env[68437]: DEBUG nova.objects.instance [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lazy-loading 'flavor' on Instance uuid ea330078-a8f2-41f4-a161-5d0e29ddfab5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.961357] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.961673] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-388056e0-f979-4757-84b4-e38518028c66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.974707] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 801.974707] env[68437]: value = "task-2944019" [ 801.974707] env[68437]: _type = "Task" [ 801.974707] env[68437]: } to complete. 
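Annotation: the resource-tracker sweep above sorts every allocation held against this compute node into three buckets, paraphrased below as a small decision helper (names and structure are illustrative, not Nova's internals):

def classify_allocation(uuid, tracked_instances, scheduled_but_not_started):
    """Mirror the three outcomes logged by _remove_deleted_instances_allocations."""
    if uuid in scheduled_but_not_started:
        return 'skip'   # scheduler made the allocation; instance not built yet
    if uuid in tracked_instances:
        return 'keep'   # actively managed here, allocation is expected
    return 'warn'       # allocation references this host but the instance is not
                        # managed here: "Skipping heal ... we do not know what to do."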
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.985418] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.011125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.011410] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.087945] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 802.095534] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944018, 'name': CreateSnapshot_Task, 'duration_secs': 1.223896} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.095747] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 802.096649] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41786b35-eb03-4ba2-a644-dea783d291fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.115837] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 802.116163] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 802.116377] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 802.116590] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 802.116760] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 802.116940] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 802.117330] env[68437]: DEBUG 
nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 802.117628] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 802.117865] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 802.118013] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 802.118209] env[68437]: DEBUG nova.virt.hardware [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 802.119477] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a116d7-e24c-4877-b6eb-179e716a748b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.122701] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.130256] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8042e8f0-fede-40b6-9386-4eaf3f694771 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.403764] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7337aeb6-6933-41e5-8bad-85e1592e0493 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.877s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.485822] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944019, 'name': PowerOffVM_Task, 'duration_secs': 0.242213} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.485980] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 802.486261] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.487110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9d74ca-0d2c-495c-8677-8d6996a69410 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.493690] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 802.494008] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e9d0cb9-2bd0-49fc-a5fc-f96325f54a54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.575342] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.575342] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Deleting contents of the VM from datastore datastore2 {{(pid=68437) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.575342] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore2] 860107df-4e9b-44b1-9e85-b0ee3a827268 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.575342] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c379f83-00ba-4b2c-bdc0-e2d69f16f4d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.591733] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 802.591733] env[68437]: value = "task-2944021" [ 802.591733] env[68437]: _type = "Task" [ 802.591733] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.600827] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.617385] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 802.618245] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1045676f-3ae5-4f71-a7e0-f8d992e6d483 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.625686] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 29e9555b-f928-43e7-a3a3-869ed07d7326 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 802.632287] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 802.632287] env[68437]: value = "task-2944022" [ 802.632287] env[68437]: _type = "Task" [ 802.632287] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.639592] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944022, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.909099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.909429] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.101569] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388443} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.101829] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.102197] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.102380] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.131535] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.144577] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944022, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.178397] env[68437]: DEBUG nova.compute.manager [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Received event network-vif-plugged-b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 803.178647] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] Acquiring lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.179083] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] Lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.179083] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] Lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.179200] env[68437]: DEBUG nova.compute.manager [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] No waiting events found dispatching network-vif-plugged-b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 803.179365] env[68437]: WARNING nova.compute.manager [req-1ca5ed0e-5920-4fd7-8a47-d71809ff3f7e req-e28a40b9-968b-4406-95a4-0a92ccab7703 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Received unexpected event network-vif-plugged-b0fdf74e-1890-4cc6-97a1-0345a2a42b11 for instance with vm_state building and task_state spawning. 
[ 803.407870] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Successfully updated port: b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 803.412529] env[68437]: INFO nova.compute.manager [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Detaching volume 97cb9764-cf92-4eda-944b-9e7123615f4c [ 803.452794] env[68437]: INFO nova.virt.block_device [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Attempting to driver detach volume 97cb9764-cf92-4eda-944b-9e7123615f4c from mountpoint /dev/sdb [ 803.452885] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 803.453062] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590955', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'name': 'volume-97cb9764-cf92-4eda-944b-9e7123615f4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea330078-a8f2-41f4-a161-5d0e29ddfab5', 'attached_at': '', 'detached_at': '', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'serial': '97cb9764-cf92-4eda-944b-9e7123615f4c'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 803.453939] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66d90fb-9863-4eb6-8291-11001bd3ddb2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.477130] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd86443-4818-45da-bce1-490fa8fbc278 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.483964] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3c1584-fc87-4de1-b132-bd3260cb7059 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.504616] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614827df-02d4-431f-9551-ff55c21eecae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.519289] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 
tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] The volume has not been displaced from its original location: [datastore1] volume-97cb9764-cf92-4eda-944b-9e7123615f4c/volume-97cb9764-cf92-4eda-944b-9e7123615f4c.vmdk. No consolidation needed. {{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 803.524591] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfiguring VM instance instance-00000009 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 803.524910] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-935956d0-ec1d-4c95-b1c1-9d8aab5c6547 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.542619] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Waiting for the task: (returnval){ [ 803.542619] env[68437]: value = "task-2944023" [ 803.542619] env[68437]: _type = "Task" [ 803.542619] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.550423] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944023, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.641698] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance acbf4c5c-341c-4ebd-ad29-90ebf531aa86 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 803.648881] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944022, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.911021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.911021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquired lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.911021] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 804.053457] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944023, 'name': ReconfigVM_Task, 'duration_secs': 0.490725} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.053762] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Reconfigured VM instance instance-00000009 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 804.058528] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b513ea32-ca26-4513-9318-b9f0a3d75c5a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.074908] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Waiting for the task: (returnval){ [ 804.074908] env[68437]: value = "task-2944024" [ 804.074908] env[68437]: _type = "Task" [ 804.074908] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.083176] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944024, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.138248] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.138419] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.138502] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.138673] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.138828] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 804.138974] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.139201] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.139355] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
804.139519] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.139680] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.139851] env[68437]: DEBUG nova.virt.hardware [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.140839] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9187103b-365f-4e6b-aa27-66e5c5b913fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.146838] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance a01364f9-e30d-4140-ae41-1e7c4aaa2251 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.156193] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6aeb11-67c9-411b-831d-e69490047058 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.160094] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944022, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.173924] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:4e:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '361c162e-f032-4355-aa03-a7b16b7ad181', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 804.181580] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 804.182088] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 804.182325] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e374671-8bc0-45c3-8be1-4fcc3dd98727 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.205297] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.205297] env[68437]: value = "task-2944025" [ 804.205297] env[68437]: _type = "Task" [ 804.205297] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.450836] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 804.584275] env[68437]: DEBUG oslo_vmware.api [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Task: {'id': task-2944024, 'name': ReconfigVM_Task, 'duration_secs': 0.159535} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.584498] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590955', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'name': 'volume-97cb9764-cf92-4eda-944b-9e7123615f4c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea330078-a8f2-41f4-a161-5d0e29ddfab5', 'attached_at': '', 'detached_at': '', 'volume_id': '97cb9764-cf92-4eda-944b-9e7123615f4c', 'serial': '97cb9764-cf92-4eda-944b-9e7123615f4c'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 804.625485] env[68437]: DEBUG nova.network.neutron [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Updating instance_info_cache with network_info: [{"id": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "address": "fa:16:3e:0b:d4:ed", "network": {"id": "a4b2a2a2-0e7e-44fb-a1cc-550d92402f21", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1746195985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"a0003f48b0864b2c9e6a37fa0515f577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fdf74e-18", "ovs_interfaceid": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.649894] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance b7706bf2-936f-439c-8e9f-b2241d0c211c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 804.651093] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944022, 'name': CloneVM_Task, 'duration_secs': 1.715289} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.651336] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Created linked-clone VM from snapshot [ 804.652254] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fc8863-ee62-4713-9ebd-792281db31f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.660343] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Uploading image ae3d5eb2-c744-4978-91bb-b1b9ef05e554 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 804.684351] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 804.684351] env[68437]: value = "vm-590957" [ 804.684351] env[68437]: _type = "VirtualMachine" [ 804.684351] env[68437]: }. 
{{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 804.684539] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-96b5bfa1-c213-48d6-bd96-33f5d7c00b1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.690533] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease: (returnval){ [ 804.690533] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52571140-bc77-86be-28ec-96fcf943d980" [ 804.690533] env[68437]: _type = "HttpNfcLease" [ 804.690533] env[68437]: } obtained for exporting VM: (result){ [ 804.690533] env[68437]: value = "vm-590957" [ 804.690533] env[68437]: _type = "VirtualMachine" [ 804.690533] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 804.690773] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the lease: (returnval){ [ 804.690773] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52571140-bc77-86be-28ec-96fcf943d980" [ 804.690773] env[68437]: _type = "HttpNfcLease" [ 804.690773] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 804.696807] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 804.696807] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52571140-bc77-86be-28ec-96fcf943d980" [ 804.696807] env[68437]: _type = "HttpNfcLease" [ 804.696807] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 804.713975] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944025, 'name': CreateVM_Task, 'duration_secs': 0.356488} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.714149] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.714788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.714951] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 804.715481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 804.715573] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69ef087f-4b3b-4376-8bdf-514a147d194b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.719622] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 804.719622] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a96a95-5cfc-11c2-f841-5cfb50edf58d" [ 804.719622] env[68437]: _type = "Task" [ 804.719622] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.727212] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a96a95-5cfc-11c2-f841-5cfb50edf58d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.127952] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Releasing lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.128361] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Instance network_info: |[{"id": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "address": "fa:16:3e:0b:d4:ed", "network": {"id": "a4b2a2a2-0e7e-44fb-a1cc-550d92402f21", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1746195985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0003f48b0864b2c9e6a37fa0515f577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fdf74e-18", "ovs_interfaceid": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 805.128809] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:d4:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0fdf74e-1890-4cc6-97a1-0345a2a42b11', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 805.137279] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Creating folder: Project (a0003f48b0864b2c9e6a37fa0515f577). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 805.137279] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c5aa322-4cbd-438a-ad62-01a704516121 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.145822] env[68437]: DEBUG nova.objects.instance [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lazy-loading 'flavor' on Instance uuid ea330078-a8f2-41f4-a161-5d0e29ddfab5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.149329] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Created folder: Project (a0003f48b0864b2c9e6a37fa0515f577) in parent group-v590848. [ 805.149624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Creating folder: Instances. Parent ref: group-v590959. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 805.149903] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26faea1e-0399-4a3d-abe6-8237fbfceb56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.153444] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.161537] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Created folder: Instances in parent group-v590959. [ 805.161784] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 805.161976] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 805.162811] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93dc36f7-e11f-4aac-adb0-3c42b9667f04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.182613] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 805.182613] env[68437]: value = "task-2944029" [ 805.182613] env[68437]: _type = "Task" [ 805.182613] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.190537] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944029, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.197726] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 805.197726] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52571140-bc77-86be-28ec-96fcf943d980" [ 805.197726] env[68437]: _type = "HttpNfcLease" [ 805.197726] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 805.198028] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 805.198028] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52571140-bc77-86be-28ec-96fcf943d980" [ 805.198028] env[68437]: _type = "HttpNfcLease" [ 805.198028] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 805.198742] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc759e89-532e-4e1c-9bda-7670bcf6ff22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.207040] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 805.207593] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 805.273712] env[68437]: DEBUG nova.compute.manager [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Received event network-changed-b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 805.273915] env[68437]: DEBUG nova.compute.manager [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Refreshing instance network info cache due to event network-changed-b0fdf74e-1890-4cc6-97a1-0345a2a42b11. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 805.274141] env[68437]: DEBUG oslo_concurrency.lockutils [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] Acquiring lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.274295] env[68437]: DEBUG oslo_concurrency.lockutils [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] Acquired lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.274447] env[68437]: DEBUG nova.network.neutron [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Refreshing network info cache for port b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 805.283741] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a96a95-5cfc-11c2-f841-5cfb50edf58d, 'name': SearchDatastore_Task, 'duration_secs': 0.010117} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.283990] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 805.284276] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.284516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.284660] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.284862] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.285161] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49013e35-075e-4f16-878e-92904e5cef3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.294849] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.295066] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.295876] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a913b5e-2087-4b46-a537-f99334994fc1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.302789] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 805.302789] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52695b8f-5d00-c7c6-cfa1-15a1da63cf04" [ 805.302789] env[68437]: _type = "Task" [ 805.302789] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.311402] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52695b8f-5d00-c7c6-cfa1-15a1da63cf04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.322506] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-41fdd8c0-0929-44b2-9229-0a468d623708 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.656331] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 26985e45-21ff-40bb-ac2b-c6f3700ccc97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 805.693240] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944029, 'name': CreateVM_Task, 'duration_secs': 0.387989} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.693552] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 805.694318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.694478] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.694866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 805.695615] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-459e7546-5452-4e8a-8a51-cd9aae5ef96e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.700082] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 805.700082] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522798db-ee68-6e3a-0d0a-3f6dbe70b6d9" [ 805.700082] env[68437]: _type = "Task" [ 805.700082] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.708336] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522798db-ee68-6e3a-0d0a-3f6dbe70b6d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.813717] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52695b8f-5d00-c7c6-cfa1-15a1da63cf04, 'name': SearchDatastore_Task, 'duration_secs': 0.010766} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.814756] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2be0eb-d424-4fad-b2eb-fcd1ceb1f347 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.820599] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 805.820599] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52932870-a7ed-1ea6-da37-a3ce28fda316" [ 805.820599] env[68437]: _type = "Task" [ 805.820599] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.828838] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52932870-a7ed-1ea6-da37-a3ce28fda316, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.129330] env[68437]: DEBUG nova.network.neutron [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Updated VIF entry in instance network info cache for port b0fdf74e-1890-4cc6-97a1-0345a2a42b11. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 806.129704] env[68437]: DEBUG nova.network.neutron [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Updating instance_info_cache with network_info: [{"id": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "address": "fa:16:3e:0b:d4:ed", "network": {"id": "a4b2a2a2-0e7e-44fb-a1cc-550d92402f21", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1746195985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0003f48b0864b2c9e6a37fa0515f577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fdf74e-18", "ovs_interfaceid": "b0fdf74e-1890-4cc6-97a1-0345a2a42b11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.154228] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdee862a-fc08-45c6-be23-50ffbf6cfa01 tempest-VolumesAssistedSnapshotsTest-2004278883 tempest-VolumesAssistedSnapshotsTest-2004278883-project-admin] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" "released" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.245s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.159380] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ed1a81fd-dd4b-4126-96de-3c3f67cdca31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.212481] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522798db-ee68-6e3a-0d0a-3f6dbe70b6d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010279} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.212917] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.213486] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 806.213805] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.334122] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52932870-a7ed-1ea6-da37-a3ce28fda316, 'name': SearchDatastore_Task, 'duration_secs': 0.009762} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.335120] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.335576] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.336118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.336470] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 806.336921] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-449ce93d-37e1-4522-85d7-8b2c8511207b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.340945] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae7c1688-c575-48e3-8363-72c8109eff8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.349999] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 806.349999] env[68437]: value = "task-2944030" [ 806.349999] env[68437]: _type = "Task" [ 806.349999] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.352199] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 806.352491] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 806.356171] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d948927-f132-4554-9356-8b58cc70db0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.362086] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 806.362086] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d8696-0039-b15f-b119-e0292f75484d" [ 806.362086] env[68437]: _type = "Task" [ 806.362086] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.365603] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.373467] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523d8696-0039-b15f-b119-e0292f75484d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.633054] env[68437]: DEBUG oslo_concurrency.lockutils [req-db49402a-b7e6-44ea-89fa-fb930eebbbcd req-13daa7a7-9b08-40bb-b713-950c9f1ef254 service nova] Releasing lock "refresh_cache-995a3eae-c025-4efa-b509-0bf678bb0388" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.662586] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance fc62ff9d-1bd8-4b32-9e71-41410276802d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.864344] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944030, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497203} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.865849] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.865849] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.865849] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94384916-ebdc-4fc9-9b49-792641b5f3d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.878730] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523d8696-0039-b15f-b119-e0292f75484d, 'name': SearchDatastore_Task, 'duration_secs': 0.012195} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.881120] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 806.881120] env[68437]: value = "task-2944032" [ 806.881120] env[68437]: _type = "Task" [ 806.881120] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.881382] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9b857d3-eb32-4fe4-94e8-30cdfc6ec0ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.891625] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944032, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.893075] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 806.893075] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b206e0-8e5c-f942-84de-434165f8a1d7" [ 806.893075] env[68437]: _type = "Task" [ 806.893075] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.900874] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b206e0-8e5c-f942-84de-434165f8a1d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.166217] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ba0d8067-a617-4910-b2f6-33a7be461f8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.167053] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 807.167053] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4416MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 807.395852] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944032, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072132} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.399408] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.402650] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352e71ed-9f9b-4d97-ae25-aa29f8d41349 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.411127] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b206e0-8e5c-f942-84de-434165f8a1d7, 'name': SearchDatastore_Task, 'duration_secs': 0.010165} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.421007] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.421311] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 995a3eae-c025-4efa-b509-0bf678bb0388/995a3eae-c025-4efa-b509-0bf678bb0388.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 807.430257] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.433357] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39fc4304-3801-492b-b690-4f93128a57e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.435653] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cfbbe340-20b0-4045-a199-a601bbc09ab6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.455538] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 807.455538] env[68437]: value = "task-2944034" [ 807.455538] env[68437]: _type = "Task" [ 807.455538] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.456912] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 807.456912] env[68437]: value = "task-2944033" [ 807.456912] env[68437]: _type = "Task" [ 807.456912] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.475094] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944034, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.478464] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944033, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.790093] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726c78e7-8bf5-4443-a43e-8b281302df75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.805716] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bea4c3-0dd5-4e97-9046-acc5fc41abe5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.856955] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af86723-6d76-4287-a7c1-0e5d87aa5262 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.866832] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda76a0d-b389-4507-a85a-05b42351ff68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.883432] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.971890] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944034, 'name': ReconfigVM_Task, 'duration_secs': 0.511881} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.972147] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944033, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.972404] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 860107df-4e9b-44b1-9e85-b0ee3a827268/860107df-4e9b-44b1-9e85-b0ee3a827268.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.973051] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea3c15db-af75-4e7c-9872-6346a3d96998 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.978975] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 807.978975] env[68437]: value = "task-2944035" [ 807.978975] env[68437]: _type = "Task" [ 807.978975] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.991654] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944035, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.259225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.260112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.260112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.260112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.260444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.262722] env[68437]: INFO nova.compute.manager [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Terminating instance [ 808.388685] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.474119] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944033, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.487636] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944035, 'name': Rename_Task, 'duration_secs': 0.171601} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.487924] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 808.488195] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a942cd43-a1ee-4260-89a5-85da60bf9d3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.496135] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 808.496135] env[68437]: value = "task-2944036" [ 808.496135] env[68437]: _type = "Task" [ 808.496135] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.505864] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.766690] env[68437]: DEBUG nova.compute.manager [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 808.767329] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.768493] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3575bf-7d7e-4c51-b2b3-746a931a565a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.776698] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 808.776958] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f48cf504-e822-4fbb-89ec-5ccc1f75ba8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.783125] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 808.783125] env[68437]: value = "task-2944037" [ 808.783125] env[68437]: _type = "Task" [ 808.783125] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.792954] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944037, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.849287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.849287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.894068] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 808.894390] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.858s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.894769] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.904s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.896323] env[68437]: INFO nova.compute.claims [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 808.976176] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944033, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.466705} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.976465] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 995a3eae-c025-4efa-b509-0bf678bb0388/995a3eae-c025-4efa-b509-0bf678bb0388.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 808.976915] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 808.977291] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db6fe464-eba3-420d-9338-0e1d5171ac0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.987886] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 808.987886] env[68437]: value = "task-2944038" [ 808.987886] env[68437]: _type = "Task" [ 808.987886] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.001987] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944038, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.011787] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944036, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.295168] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944037, 'name': PowerOffVM_Task, 'duration_secs': 0.215337} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.295854] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.295854] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 809.296189] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf4e89b2-d37e-44c8-bd6f-191c1e04cc4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.366990] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 809.367071] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 809.367268] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleting the datastore file [datastore1] c74569b8-dfc9-4a74-9d25-74b484bd9477 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.367563] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16b6d096-3709-4443-adee-f1c6fbb50cd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.374437] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 809.374437] env[68437]: value = "task-2944040" [ 809.374437] env[68437]: _type = "Task" [ 809.374437] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.384261] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.498606] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944038, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07994} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.499742] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.500672] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66ef365-b6e5-42e6-a565-eb9baf4f3405 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.530441] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 995a3eae-c025-4efa-b509-0bf678bb0388/995a3eae-c025-4efa-b509-0bf678bb0388.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.535046] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c16aac92-55c7-4380-a23b-b755db44a8d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.550086] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.550358] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.550574] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.550752] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 
tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.550930] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.552580] env[68437]: DEBUG oslo_vmware.api [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944036, 'name': PowerOnVM_Task, 'duration_secs': 0.639401} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.553767] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 809.553767] env[68437]: DEBUG nova.compute.manager [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.554852] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a203dc-3452-4cff-a40e-b33ddd15fff5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.558106] env[68437]: INFO nova.compute.manager [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Terminating instance [ 809.561435] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 809.561435] env[68437]: value = "task-2944041" [ 809.561435] env[68437]: _type = "Task" [ 809.561435] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.578632] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944041, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.885484] env[68437]: DEBUG oslo_vmware.api [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15471} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.885647] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 809.885825] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 809.885999] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.886267] env[68437]: INFO nova.compute.manager [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Took 1.12 seconds to destroy the instance on the hypervisor. [ 809.886524] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 809.886738] env[68437]: DEBUG nova.compute.manager [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 809.886835] env[68437]: DEBUG nova.network.neutron [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 810.071800] env[68437]: DEBUG nova.compute.manager [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 810.072031] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.072911] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82249090-f1e8-4d91-8919-d2cf8ba84721 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.087087] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944041, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.096013] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.096533] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.096772] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79124d66-71f0-4dc3-be6a-d2c2752a0699 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.105076] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 810.105076] env[68437]: value = "task-2944042" [ 810.105076] env[68437]: _type = "Task" [ 810.105076] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.119262] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2944042, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.471851] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4888be-01d9-4464-879f-78f9a701ddcd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.480338] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5245b1ae-eb31-4d6b-b75f-d9023ecb2ca7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.517415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5eca83-e1b9-4e26-b2ba-bf413a072d38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.525261] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58b318a-767e-4a83-a2b3-33ee59776e4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.540702] env[68437]: DEBUG nova.compute.provider_tree [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.575216] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944041, 'name': ReconfigVM_Task, 'duration_secs': 0.687041} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.575847] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 995a3eae-c025-4efa-b509-0bf678bb0388/995a3eae-c025-4efa-b509-0bf678bb0388.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.577791] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8c14afc-7831-4acc-b316-83a38f5a0562 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.586049] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 810.586049] env[68437]: value = "task-2944043" [ 810.586049] env[68437]: _type = "Task" [ 810.586049] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.595252] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944043, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.619863] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2944042, 'name': PowerOffVM_Task, 'duration_secs': 0.287457} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.620267] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.620459] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.620728] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0705672e-5100-4963-9489-12910cb95028 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.671293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "892bf198-7d05-4995-8137-c57095c5c839" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.671293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.698679] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 810.699599] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 810.699599] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Deleting the datastore file [datastore2] ea330078-a8f2-41f4-a161-5d0e29ddfab5 {{(pid=68437) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.699599] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be9b9fcb-3d4b-4270-9bd6-a8c5104995b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.706999] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for the task: (returnval){ [ 810.706999] env[68437]: value = "task-2944045" [ 810.706999] env[68437]: _type = "Task" [ 810.706999] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.718285] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2944045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.789679] env[68437]: DEBUG nova.compute.manager [req-0a48678b-e145-4a89-b21e-5a79a40e267f req-01cfdc45-f3b4-4509-bc63-1320beeef121 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Received event network-vif-deleted-cc91b233-efdf-4cb6-9817-3f48a59237be {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 810.789921] env[68437]: INFO nova.compute.manager [req-0a48678b-e145-4a89-b21e-5a79a40e267f req-01cfdc45-f3b4-4509-bc63-1320beeef121 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Neutron deleted interface cc91b233-efdf-4cb6-9817-3f48a59237be; detaching it from the instance and deleting it from the info cache [ 810.790119] env[68437]: DEBUG nova.network.neutron [req-0a48678b-e145-4a89-b21e-5a79a40e267f req-01cfdc45-f3b4-4509-bc63-1320beeef121 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.047123] env[68437]: DEBUG nova.scheduler.client.report [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.060520] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "860107df-4e9b-44b1-9e85-b0ee3a827268" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.060788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 
tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.061029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.061224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.061389] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.063617] env[68437]: INFO nova.compute.manager [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Terminating instance [ 811.098083] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944043, 'name': Rename_Task, 'duration_secs': 0.242498} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.098350] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 811.098608] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c9a0457-6aef-4dbb-893d-25b46b0f675b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.105518] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 811.105518] env[68437]: value = "task-2944046" [ 811.105518] env[68437]: _type = "Task" [ 811.105518] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.121401] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.159032] env[68437]: DEBUG nova.network.neutron [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.221800] env[68437]: DEBUG oslo_vmware.api [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Task: {'id': task-2944045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271297} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.226763] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.226763] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.226763] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.226945] env[68437]: INFO nova.compute.manager [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Took 1.15 seconds to destroy the instance on the hypervisor. [ 811.227221] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 811.227439] env[68437]: DEBUG nova.compute.manager [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 811.227532] env[68437]: DEBUG nova.network.neutron [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 811.293359] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc998b81-bc52-4f71-b9a1-a493c0a0f5a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.308556] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c613239-a2dd-4597-8cde-341b83c85b43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.354460] env[68437]: DEBUG nova.compute.manager [req-0a48678b-e145-4a89-b21e-5a79a40e267f req-01cfdc45-f3b4-4509-bc63-1320beeef121 service nova] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Detach interface failed, port_id=cc91b233-efdf-4cb6-9817-3f48a59237be, reason: Instance c74569b8-dfc9-4a74-9d25-74b484bd9477 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 811.553227] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.553764] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 811.556476] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 44.497s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.560060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.560060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.036s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.560060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.560727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.939s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.562239] env[68437]: INFO nova.compute.claims [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.568363] env[68437]: DEBUG nova.compute.manager [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 811.568577] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.569487] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433cde63-fc16-4ab8-9f6e-9e41f013dd87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.578079] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 811.578365] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6b8d991-ebc5-48a6-970e-6065cde7ee9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.587617] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 811.587617] env[68437]: value = "task-2944047" [ 811.587617] env[68437]: _type = "Task" [ 811.587617] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.603216] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944047, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.605102] env[68437]: INFO nova.scheduler.client.report [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Deleted allocations for instance 56cfa7f3-12ad-42d0-a27f-ab8136a903ee [ 811.619798] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944046, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.622285] env[68437]: INFO nova.scheduler.client.report [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Deleted allocations for instance 0484ccee-f003-4101-87c5-fed92f095d2d [ 811.659150] env[68437]: INFO nova.compute.manager [-] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Took 1.77 seconds to deallocate network for instance. 
[ 811.889801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.890047] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.069776] env[68437]: DEBUG nova.compute.utils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 812.075125] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 812.075368] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 812.101766] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944047, 'name': PowerOffVM_Task, 'duration_secs': 0.354977} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.103303] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 812.103476] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 812.103799] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-281ccc63-366e-402a-a547-35eea7b1a983 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.114666] env[68437]: DEBUG oslo_vmware.api [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944046, 'name': PowerOnVM_Task, 'duration_secs': 0.539166} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.117459] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 812.118130] env[68437]: INFO nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Took 10.03 seconds to spawn the instance on the hypervisor. 
[ 812.118332] env[68437]: DEBUG nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 812.119179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6cd55ba5-0fc7-432c-b0e1-023d96a2df2e tempest-TenantUsagesTestJSON-193485392 tempest-TenantUsagesTestJSON-193485392-project-member] Lock "56cfa7f3-12ad-42d0-a27f-ab8136a903ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.556s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.121341] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e309b811-8521-4e54-aafa-ffacd77c96c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.136127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f7a3bf8-fdef-4192-bb2d-27f520250d3a tempest-ServersAdminNegativeTestJSON-673258702 tempest-ServersAdminNegativeTestJSON-673258702-project-member] Lock "0484ccee-f003-4101-87c5-fed92f095d2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.525s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.156345] env[68437]: DEBUG nova.policy [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0129aa3ec50487c82da7fe906fde65a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f7150a60f6ca4620a4771ad9c8c8c644', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 812.177357] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.181619] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 812.181884] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 812.182113] env[68437]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore1] 860107df-4e9b-44b1-9e85-b0ee3a827268 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 812.182402] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dd69265-f2fb-42f1-80cf-34425165f94c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.190897] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 812.190897] env[68437]: value = "task-2944049" [ 812.190897] env[68437]: _type = "Task" [ 812.190897] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.199762] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944049, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.578829] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 812.651594] env[68437]: INFO nova.compute.manager [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Took 49.49 seconds to build instance. [ 812.709263] env[68437]: DEBUG oslo_vmware.api [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944049, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229043} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.709710] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 812.710026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 812.710267] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 812.710559] env[68437]: INFO nova.compute.manager [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Took 1.14 seconds to destroy the instance on the hypervisor. [ 812.712391] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.712391] env[68437]: DEBUG nova.compute.manager [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 812.712391] env[68437]: DEBUG nova.network.neutron [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 812.794032] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Successfully created port: 6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.839441] env[68437]: DEBUG nova.compute.manager [req-f3dc0866-fa0a-476c-aee1-6d86d702f2dd req-fa5d090b-e4cc-423b-8f7a-c30f98ecd86a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Received event network-vif-deleted-cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 812.839730] env[68437]: INFO nova.compute.manager [req-f3dc0866-fa0a-476c-aee1-6d86d702f2dd req-fa5d090b-e4cc-423b-8f7a-c30f98ecd86a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Neutron deleted interface cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49; detaching it from the instance and deleting it from the info cache [ 812.839914] env[68437]: DEBUG nova.network.neutron [req-f3dc0866-fa0a-476c-aee1-6d86d702f2dd req-fa5d090b-e4cc-423b-8f7a-c30f98ecd86a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.956827] env[68437]: DEBUG nova.network.neutron [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.041059] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "995a3eae-c025-4efa-b509-0bf678bb0388" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.156039] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2323bb62-b3f7-42b1-acd8-e8f1bfb23ba4 tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.601s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.157380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.117s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.158113] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.158472] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.158472] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.160906] env[68437]: INFO nova.compute.manager [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Terminating instance [ 813.282191] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea452042-0ffe-447d-b74c-47507fa9aca8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.291474] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6763553-ac9d-4614-bd66-1be08bfb632c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.322893] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d507cf7-81ee-4293-aab0-e100f3c14b7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.330605] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e967ecf-921b-48e3-9170-d381940e932c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.348778] env[68437]: DEBUG nova.compute.provider_tree [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.350419] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-13541220-9a65-4ff4-9e34-c6b3f0a4aa31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.359649] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfdda2c-35ea-4704-94fa-2e5559d234b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.406731] env[68437]: DEBUG nova.compute.manager [req-f3dc0866-fa0a-476c-aee1-6d86d702f2dd req-fa5d090b-e4cc-423b-8f7a-c30f98ecd86a service nova] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Detach interface failed, port_id=cbc16d3e-2e05-4f16-8b2a-1f2ba26dea49, reason: Instance ea330078-a8f2-41f4-a161-5d0e29ddfab5 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 813.459473] env[68437]: INFO nova.compute.manager [-] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Took 2.23 seconds to deallocate network for instance. [ 813.599275] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 813.628591] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 813.629115] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 813.629409] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 813.629698] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 813.629943] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 
tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 813.630219] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 813.630553] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 813.630821] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 813.631110] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 813.631987] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 813.631987] env[68437]: DEBUG nova.virt.hardware [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 813.632499] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84c28dc-857f-4f0e-bc9a-75cbfba326cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.641083] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973c90bf-20be-47e4-896e-12208206df57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.660970] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 813.672194] env[68437]: DEBUG nova.compute.manager [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.672433] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.674089] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9881022-e2a9-408b-915b-64d326d5bd1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.683549] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.684543] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dfc0e12-f13f-46ce-ad7f-8a1f2590d6c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.691700] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 813.691700] env[68437]: value = "task-2944050" [ 813.691700] env[68437]: _type = "Task" [ 813.691700] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.701398] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944050, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.759908] env[68437]: DEBUG nova.network.neutron [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.857137] env[68437]: DEBUG nova.scheduler.client.report [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.959826] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 813.962018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a2f653-a393-4d0e-95b6-d271ed300a7a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.968155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.968423] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 813.968569] env[68437]: ERROR oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk due to incomplete transfer. [ 813.968799] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4f9f23d2-cd9b-4979-9360-343ebc8f3ac8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.976494] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d48a84-3995-864e-3cd6-3e81cfa88bd4/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 813.976494] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Uploaded image ae3d5eb2-c744-4978-91bb-b1b9ef05e554 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 813.979595] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 813.979856] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c6cdb8b8-3cbe-41e9-80f0-e5178c76f7bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.986706] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 813.986706] env[68437]: value = "task-2944051" [ 813.986706] env[68437]: _type = "Task" [ 813.986706] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.997133] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944051, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.012375] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.012609] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.186224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.203559] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944050, 'name': PowerOffVM_Task, 'duration_secs': 0.198225} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.204063] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.204372] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.204748] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1aeff83b-4597-486b-b215-3b0fc2320c7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.263123] env[68437]: INFO nova.compute.manager [-] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Took 1.55 seconds to deallocate network for instance. [ 814.270991] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.271298] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.271700] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Deleting the datastore file [datastore1] 995a3eae-c025-4efa-b509-0bf678bb0388 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.272219] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bf580b5-8a10-4f72-b246-871a51b3bb96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.281314] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for the task: (returnval){ [ 814.281314] env[68437]: value = "task-2944053" [ 814.281314] env[68437]: _type = "Task" [ 814.281314] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.290363] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.368184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.805s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.368184] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 814.369491] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.067s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.371182] env[68437]: INFO nova.compute.claims [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.496583] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944051, 'name': Destroy_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.499809] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Successfully updated port: 6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 814.516065] env[68437]: DEBUG nova.compute.utils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.684290] env[68437]: DEBUG nova.compute.manager [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Received event network-vif-plugged-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 814.685767] env[68437]: DEBUG oslo_concurrency.lockutils [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] Acquiring lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.686892] env[68437]: DEBUG oslo_concurrency.lockutils [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.688056] env[68437]: DEBUG oslo_concurrency.lockutils [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.688370] env[68437]: DEBUG nova.compute.manager [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] No waiting events found dispatching network-vif-plugged-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 814.688672] env[68437]: WARNING nova.compute.manager [req-ab12a6ff-e941-419c-80ab-a7b53e46b578 req-8e7c8d3a-a432-4fa5-a518-08468dfff23f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Received unexpected event network-vif-plugged-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 for instance with vm_state building and task_state spawning. 
[ 814.773778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.791620] env[68437]: DEBUG oslo_vmware.api [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Task: {'id': task-2944053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160438} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.791841] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.792094] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.793367] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.793367] env[68437]: INFO nova.compute.manager [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Took 1.12 seconds to destroy the instance on the hypervisor. [ 814.793367] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.793367] env[68437]: DEBUG nova.compute.manager [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.793367] env[68437]: DEBUG nova.network.neutron [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 814.879640] env[68437]: DEBUG nova.compute.utils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.883855] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.884305] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 814.997808] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944051, 'name': Destroy_Task, 'duration_secs': 0.860247} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.998529] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Destroyed the VM [ 814.998776] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 814.999039] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a46b2f96-59e3-480f-964e-566fa602439c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.007692] env[68437]: DEBUG nova.policy [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e4b1b3012874778bc147c3e7b00133c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6f6382f6c6843529a37d7c62837523a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 815.013052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.013052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquired lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.013052] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 815.015801] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 815.015801] env[68437]: value = "task-2944054" [ 815.015801] env[68437]: _type = "Task" [ 815.015801] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.021726] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.030854] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944054, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.387565] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.475811] env[68437]: DEBUG nova.compute.manager [req-5fbea378-9bc3-4794-9f15-95592abaed33 req-a9d12d75-2fb1-4bf3-95c2-c4bd17df2947 service nova] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Received event network-vif-deleted-361c162e-f032-4355-aa03-a7b16b7ad181 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 815.529702] env[68437]: DEBUG oslo_vmware.api [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944054, 'name': RemoveSnapshot_Task, 'duration_secs': 0.471137} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.529979] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 815.530226] env[68437]: INFO nova.compute.manager [None req-bc4012df-a7ec-40e8-8937-5cb6af88b918 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Took 15.53 seconds to snapshot the instance on the hypervisor. 
[ 815.592557] env[68437]: DEBUG nova.network.neutron [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.650710] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Successfully created port: 4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.724509] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 815.968366] env[68437]: DEBUG nova.network.neutron [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updating instance_info_cache with network_info: [{"id": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "address": "fa:16:3e:94:aa:20", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e6fed15-cc", "ovs_interfaceid": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.994095] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b2b87e-e92b-4b99-8968-af690c5d698b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.002553] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912dc558-3bc0-4ee5-8589-2cd6d897f017 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.042015] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d67043-44b1-497b-9993-e28a64831d99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.053593] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf6757d-3539-4982-8f78-5dac069a55c5 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.068025] env[68437]: DEBUG nova.compute.provider_tree [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.098927] env[68437]: INFO nova.compute.manager [-] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Took 1.31 seconds to deallocate network for instance. [ 816.122970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.122970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.122970] env[68437]: INFO nova.compute.manager [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Attaching volume 078ffb5e-17fc-4576-9e94-9314af92b778 to /dev/sdb [ 816.162139] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-400326e4-34de-4053-baf2-5a15e1210978 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.171858] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95ea5ab-2d19-4a55-a788-cf345d9fefed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.187930] env[68437]: DEBUG nova.virt.block_device [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating existing volume attachment record: 0531f0f8-6a27-4599-994f-a8a5c6adf3db {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 816.403328] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.436476] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.436715] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.436883] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.437088] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.437354] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.437522] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.437591] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.437805] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.437902] env[68437]: DEBUG nova.virt.hardware [None 
req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.438079] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.441178] env[68437]: DEBUG nova.virt.hardware [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.441178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1f2eb4-48a2-4f19-9a1a-fdd7337a99d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.446978] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cac123-f481-472b-a2a1-84358b1a6db9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.472025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Releasing lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.472025] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Instance network_info: |[{"id": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "address": "fa:16:3e:94:aa:20", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e6fed15-cc", "ovs_interfaceid": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 816.472250] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 
tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:aa:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e6fed15-cc65-4f1f-9bf5-6854202b2ad0', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 816.490804] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Creating folder: Project (f7150a60f6ca4620a4771ad9c8c8c644). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.490804] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4225cf8-9fef-4aa8-811e-8b95e97b5d1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.506855] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Created folder: Project (f7150a60f6ca4620a4771ad9c8c8c644) in parent group-v590848. [ 816.507364] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Creating folder: Instances. Parent ref: group-v590962. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 816.507364] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8083d1dd-240f-47b6-b92b-29f6af47e033 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.517744] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Created folder: Instances in parent group-v590962. [ 816.518016] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 816.518282] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 816.518424] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5e55c76-a19a-4ce6-a412-7501b87933b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.546896] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 816.546896] env[68437]: value = "task-2944059" [ 816.546896] env[68437]: _type = "Task" [ 816.546896] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.557541] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944059, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.571646] env[68437]: DEBUG nova.scheduler.client.report [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.606235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.860815] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.861168] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.861326] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.861975] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.862190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.864367] env[68437]: INFO nova.compute.manager [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Terminating instance [ 817.063917] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944059, 'name': CreateVM_Task, 'duration_secs': 0.376162} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.065161] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 817.067031] env[68437]: DEBUG nova.compute.manager [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Received event network-changed-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 817.067031] env[68437]: DEBUG nova.compute.manager [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Refreshing instance network info cache due to event network-changed-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 817.068027] env[68437]: DEBUG oslo_concurrency.lockutils [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] Acquiring lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.068027] env[68437]: DEBUG oslo_concurrency.lockutils [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] Acquired lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.068027] env[68437]: DEBUG nova.network.neutron [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Refreshing network info cache for port 6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 817.070612] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.074188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.074565] env[68437]: DEBUG oslo_concurrency.lockutils 
[None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 817.075329] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72c44ed2-847d-4959-9abd-abce751bd616 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.079284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.079284] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 817.081727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.014s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.083271] env[68437]: INFO nova.compute.claims [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.090283] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 817.090283] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ce4196-5e65-4e49-b6b0-9aa00b3a6826" [ 817.090283] env[68437]: _type = "Task" [ 817.090283] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.098850] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ce4196-5e65-4e49-b6b0-9aa00b3a6826, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.371024] env[68437]: DEBUG nova.compute.manager [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.371024] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.371329] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b0b2dc-2a9c-43b1-abf2-1d2f7b719f88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.380856] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 817.381603] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9b971a6-fddc-4afa-ac10-bc1049cb06ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.448971] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 817.449274] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 817.449407] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore2] 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 817.449709] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4a9c036-15ea-4836-9e81-daac4f8aa08e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.458164] env[68437]: DEBUG oslo_vmware.api [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 817.458164] env[68437]: value = "task-2944062" [ 817.458164] env[68437]: _type = "Task" [ 817.458164] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.466751] env[68437]: DEBUG oslo_vmware.api [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.595351] env[68437]: DEBUG nova.compute.utils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 817.595351] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.595351] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 817.606931] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ce4196-5e65-4e49-b6b0-9aa00b3a6826, 'name': SearchDatastore_Task, 'duration_secs': 0.014092} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.607251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.607509] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 817.607842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.607996] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.608806] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 817.608806] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76fdd72a-3d0a-4c7f-8d31-377590aa73d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.619461] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 817.619670] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 817.620377] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9163bf9e-9345-4c89-9647-2f8e9ef49436 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.626075] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 817.626075] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a97c8f-e763-b289-4510-4fd75e311ef6" [ 817.626075] env[68437]: _type = "Task" [ 817.626075] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.636506] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a97c8f-e763-b289-4510-4fd75e311ef6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.724777] env[68437]: DEBUG nova.policy [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2c73f6b49864dcc9ddfefdd506563aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '524f14821876408ab47b277081b145d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.920855] env[68437]: DEBUG nova.network.neutron [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updated VIF entry in instance network info cache for port 6e6fed15-cc65-4f1f-9bf5-6854202b2ad0. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 817.920855] env[68437]: DEBUG nova.network.neutron [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updating instance_info_cache with network_info: [{"id": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "address": "fa:16:3e:94:aa:20", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e6fed15-cc", "ovs_interfaceid": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.975533] env[68437]: DEBUG oslo_vmware.api [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284076} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.975533] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 817.975533] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 817.975533] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.975533] env[68437]: INFO nova.compute.manager [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 817.977736] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.977736] env[68437]: DEBUG nova.compute.manager [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 817.977736] env[68437]: DEBUG nova.network.neutron [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 818.016849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.017217] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.079306] env[68437]: DEBUG nova.compute.manager [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Received event network-vif-plugged-4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 818.080193] env[68437]: DEBUG oslo_concurrency.lockutils [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.080717] env[68437]: DEBUG oslo_concurrency.lockutils [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.081248] env[68437]: DEBUG oslo_concurrency.lockutils [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.081248] env[68437]: DEBUG nova.compute.manager [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] [instance:
8ccd7176-55c0-4118-a07e-3c4bdbba9795] No waiting events found dispatching network-vif-plugged-4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.081627] env[68437]: WARNING nova.compute.manager [req-2b3cd9b6-e5c9-4198-917f-23d991c5f174 req-7e9fac32-0604-4e00-92cb-d259e7928cd8 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Received unexpected event network-vif-plugged-4ec75d03-3ee1-480d-ab6a-acc211fd6bae for instance with vm_state building and task_state spawning. [ 818.100167] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 818.144774] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a97c8f-e763-b289-4510-4fd75e311ef6, 'name': SearchDatastore_Task, 'duration_secs': 0.011151} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.145827] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-140af649-3ecc-4b38-b220-aa88b872e567 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.151017] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 818.151017] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c66f89-42b6-e248-07a4-c880be5d7849" [ 818.151017] env[68437]: _type = "Task" [ 818.151017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.164030] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c66f89-42b6-e248-07a4-c880be5d7849, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.253221] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Successfully created port: 9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.266058] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Successfully updated port: 4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.424972] env[68437]: DEBUG oslo_concurrency.lockutils [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] Releasing lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.426911] env[68437]: DEBUG nova.compute.manager [req-e7dc7439-41bd-481d-976d-734b59280129 req-70b8dee1-e8ed-4be4-9347-84072fbca907 service nova] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Received event network-vif-deleted-b0fdf74e-1890-4cc6-97a1-0345a2a42b11 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 818.606334] env[68437]: INFO nova.virt.block_device [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Booting with volume d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7 at /dev/sda [ 818.659674] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b24e0f45-ce97-4a86-8c10-dddf7d077ee4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.669046] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c66f89-42b6-e248-07a4-c880be5d7849, 'name': SearchDatastore_Task, 'duration_secs': 0.03502} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.672017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.672017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] c5af19d6-5534-45e6-8c9c-dacf30d4fb1a/c5af19d6-5534-45e6-8c9c-dacf30d4fb1a.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 818.672017] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea502260-72b0-4b14-9c33-2dd5fb8d36e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.680209] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223735c6-92e4-44e0-85a1-64fabda0e582 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.695590] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 818.695590] env[68437]: value = "task-2944064" [ 818.695590] env[68437]: _type = "Task" [ 818.695590] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.703543] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944064, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.720674] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d81322f9-919b-4094-8742-43149f787705 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.728820] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819c748c-1a39-44a3-bd9e-7b2c3b261571 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.768821] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc665f7d-64e5-4e13-a9fc-a6d73f1709be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.771879] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.772000] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.772195] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 818.782580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bd0c30-88ca-46da-bd58-fde00cddf7a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.787710] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000631c0-15e9-4d12-8a48-dc5012e01e82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.795978] env[68437]: DEBUG nova.virt.block_device [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating existing volume attachment record: 7066bc87-d951-4801-86da-a9ec8e4791ac {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 818.800757] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed678b6-2363-46dd-ad32-eb22409face4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.830769] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaaf87e-dbbc-46e6-a23c-9a93b637d202 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.838387] env[68437]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7296c5cb-44b6-4717-ad85-636beb27aa45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.854210] env[68437]: DEBUG nova.compute.provider_tree [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.917318] env[68437]: DEBUG nova.network.neutron [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.206776] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480986} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.207130] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] c5af19d6-5534-45e6-8c9c-dacf30d4fb1a/c5af19d6-5534-45e6-8c9c-dacf30d4fb1a.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 819.207385] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 819.207690] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97916eaf-c805-4efa-8c43-b3e062b5e6fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.214427] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 819.214427] env[68437]: value = "task-2944065" [ 819.214427] env[68437]: _type = "Task" [ 819.214427] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.225062] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944065, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.312976] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 819.358146] env[68437]: DEBUG nova.scheduler.client.report [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 819.420014] env[68437]: INFO nova.compute.manager [-] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Took 1.44 seconds to deallocate network for instance. [ 819.472900] env[68437]: DEBUG nova.compute.manager [req-dfbb4710-5b80-4c7b-bdc5-6e7ac91aed1b req-a83f76b2-5afe-4897-aec5-09f0294a9689 service nova] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Received event network-vif-deleted-4d42d18c-3f02-433f-886e-903fef8696e7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 819.488695] env[68437]: DEBUG nova.network.neutron [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.728792] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944065, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.07207} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.729167] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.730017] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34865328-3889-4e7a-a5ab-c92be2250e01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.758068] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] c5af19d6-5534-45e6-8c9c-dacf30d4fb1a/c5af19d6-5534-45e6-8c9c-dacf30d4fb1a.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.758561] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "de54bc8d-2626-41fc-970a-865a842a932e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.758784] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.759338] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d1a5d6c-d82f-41cd-b9f3-65c29c22a88f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.780706] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 819.780706] env[68437]: value = "task-2944066" [ 819.780706] env[68437]: _type = "Task" [ 819.780706] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.789892] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944066, 'name': ReconfigVM_Task} progress is 5%.
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.866157] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.784s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.866721] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.872954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.783s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.877343] env[68437]: INFO nova.compute.claims [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.888685] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Successfully updated port: 9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.926902] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.969015] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.970026] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.992727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030
tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.993068] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Instance network_info: |[{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 819.993477] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:6f:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ec75d03-3ee1-480d-ab6a-acc211fd6bae', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.001566] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating folder: Project (f6f6382f6c6843529a37d7c62837523a). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.002503] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75a6d18f-722e-41f1-be1f-79ce27358eef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.016643] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created folder: Project (f6f6382f6c6843529a37d7c62837523a) in parent group-v590848. 
[ 820.016861] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating folder: Instances. Parent ref: group-v590967. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.017090] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38ff5a89-7cab-4211-bfd3-1d655cbabfc5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.026378] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created folder: Instances in parent group-v590967. [ 820.026731] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.027015] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.027254] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-725c68eb-72bc-43b3-88c3-98f2bbea2b51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.047990] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.047990] env[68437]: value = "task-2944069" [ 820.047990] env[68437]: _type = "Task" [ 820.047990] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.055574] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944069, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.107311] env[68437]: DEBUG nova.compute.manager [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Received event network-changed-4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 820.107311] env[68437]: DEBUG nova.compute.manager [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Refreshing instance network info cache due to event network-changed-4ec75d03-3ee1-480d-ab6a-acc211fd6bae. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 820.107572] env[68437]: DEBUG oslo_concurrency.lockutils [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.107780] env[68437]: DEBUG oslo_concurrency.lockutils [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.107991] env[68437]: DEBUG nova.network.neutron [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Refreshing network info cache for port 4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 820.293498] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944066, 'name': ReconfigVM_Task, 'duration_secs': 0.290582} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.298019] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Reconfigured VM instance instance-00000028 to attach disk [datastore1] c5af19d6-5534-45e6-8c9c-dacf30d4fb1a/c5af19d6-5534-45e6-8c9c-dacf30d4fb1a.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.298019] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f609acc-ba5e-4443-84d2-15557d78775a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.304565] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 820.304565] env[68437]: value = "task-2944070" [ 820.304565] env[68437]: _type = "Task" [ 820.304565] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.313993] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944070, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.383694] env[68437]: DEBUG nova.compute.utils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 820.389111] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 820.389569] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 820.396670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.396670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquired lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.396670] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 820.452214] env[68437]: DEBUG nova.policy [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0888d4e1bac46099e851a9d2f6a84b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc43ad02f60c41178dc8b891b605843d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 820.558477] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944069, 'name': CreateVM_Task, 'duration_secs': 0.469124} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.558905] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.559447] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.559681] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.560025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 820.560312] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22912cc4-1ab4-4deb-aaf3-db96022d5bb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.565644] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 820.565644] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52377796-ca57-6e74-9329-b41631c5113c" [ 820.565644] env[68437]: _type = "Task" [ 820.565644] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.573847] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52377796-ca57-6e74-9329-b41631c5113c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.761483] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 820.761483] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590966', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'name': 'volume-078ffb5e-17fc-4576-9e94-9314af92b778', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cf394b0b-cb14-4ae1-81bb-622c951bfdab', 'attached_at': '', 'detached_at': '', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'serial': '078ffb5e-17fc-4576-9e94-9314af92b778'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 820.762197] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43710392-8401-40db-9471-992f6231bbe0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.780856] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b420a454-c777-4062-b554-c3ed0229ab13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.812811] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] volume-078ffb5e-17fc-4576-9e94-9314af92b778/volume-078ffb5e-17fc-4576-9e94-9314af92b778.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 820.815713] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ac55920-af5d-428e-964f-83587334b35a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.838984] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944070, 'name': Rename_Task, 'duration_secs': 0.159734} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.840146] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.840476] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 820.840476] env[68437]: value = "task-2944071" [ 820.840476] env[68437]: _type = "Task" [ 820.840476] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.840636] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-729f9854-b34f-4e33-ab1b-472d290f628d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.850087] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944071, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.851240] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 820.851240] env[68437]: value = "task-2944072" [ 820.851240] env[68437]: _type = "Task" [ 820.851240] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.865514] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.880066] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Successfully created port: 8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.893503] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.915886] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.916921] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.916921] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.916921] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.917139] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.917173] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.917322] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.917572] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.917695] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.918621] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Got 1 possible 
topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.919085] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.919614] env[68437]: DEBUG nova.virt.hardware [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.925333] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8c4baf-2590-4559-9fbc-af236ab45001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.933821] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfcb6f0-73e3-4e44-ad83-9727b528d5d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.939478] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 820.959722] env[68437]: DEBUG nova.network.neutron [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updated VIF entry in instance network info cache for port 4ec75d03-3ee1-480d-ab6a-acc211fd6bae. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 820.960097] env[68437]: DEBUG nova.network.neutron [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.078643] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52377796-ca57-6e74-9329-b41631c5113c, 'name': SearchDatastore_Task, 'duration_secs': 0.010011} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.079088] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.079606] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.079606] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.079751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.079897] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.080221] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-371e4e94-b278-4c12-ab4f-46a43d5d4d66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.090092] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.090309] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.091040] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-921f04f4-11b2-4640-84f8-c09d286e4943 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.096703] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 821.096703] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5273b23d-deca-d17c-0cbb-10ca83a684e3" [ 821.096703] env[68437]: _type = "Task" [ 821.096703] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.107737] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5273b23d-deca-d17c-0cbb-10ca83a684e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.168151] env[68437]: DEBUG nova.network.neutron [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating instance_info_cache with network_info: [{"id": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "address": "fa:16:3e:a6:19:7e", "network": {"id": "27a6bae9-e605-41e4-86ef-8e2ff40dc05d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2117536478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "524f14821876408ab47b277081b145d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9edc8a0b-76", "ovs_interfaceid": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.351672] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944071, 'name': ReconfigVM_Task, 'duration_secs': 0.455126} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.352055] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfigured VM instance instance-00000011 to attach disk [datastore1] volume-078ffb5e-17fc-4576-9e94-9314af92b778/volume-078ffb5e-17fc-4576-9e94-9314af92b778.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 821.359733] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb96242e-fc01-4ed7-a143-7994b136d836 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.379536] env[68437]: DEBUG oslo_vmware.api [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944072, 'name': PowerOnVM_Task, 'duration_secs': 0.507163} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.382021] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.382021] env[68437]: INFO nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Took 7.78 seconds to spawn the instance on the hypervisor. [ 821.382021] env[68437]: DEBUG nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.382281] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 821.382281] env[68437]: value = "task-2944073" [ 821.382281] env[68437]: _type = "Task" [ 821.382281] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.385206] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3267bbf-15c1-428c-8d18-d9516c3523c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.402535] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944073, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.462708] env[68437]: DEBUG oslo_concurrency.lockutils [req-9295f49d-7dd8-4565-82b5-7021476df2f1 req-65480c40-fa45-4b03-b239-d619c5280d25 service nova] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.500252] env[68437]: DEBUG nova.compute.manager [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Received event network-vif-plugged-9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 821.500503] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Acquiring lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.500722] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.500879] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.501071] env[68437]: DEBUG nova.compute.manager [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] No waiting events found dispatching network-vif-plugged-9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.501317] env[68437]: WARNING nova.compute.manager [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Received unexpected event network-vif-plugged-9edc8a0b-761d-4911-904e-9cb4a163bf7e for instance with vm_state building and task_state spawning. [ 821.501557] env[68437]: DEBUG nova.compute.manager [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Received event network-changed-9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 821.501784] env[68437]: DEBUG nova.compute.manager [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Refreshing instance network info cache due to event network-changed-9edc8a0b-761d-4911-904e-9cb4a163bf7e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 821.501972] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Acquiring lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.548349] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1eead17-fc21-414c-acc0-9a6f29ee7cac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.556625] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03650bfe-5b76-4734-9285-9e5defcdd9bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.587680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8847ab17-60e9-4975-bc85-bd5f301491aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.593984] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67c9058-fb1f-46e1-94e5-4466c92d7f2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.610868] env[68437]: DEBUG nova.compute.provider_tree [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.617417] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5273b23d-deca-d17c-0cbb-10ca83a684e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.618172] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93a9f93d-6297-495e-bdc2-22878a3897af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.624014] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 821.624014] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523fa1eb-2bcc-8e42-de4e-1416b13dcee4" [ 821.624014] env[68437]: _type = "Task" [ 821.624014] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.634217] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523fa1eb-2bcc-8e42-de4e-1416b13dcee4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.669553] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Releasing lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.669876] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Instance network_info: |[{"id": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "address": "fa:16:3e:a6:19:7e", "network": {"id": "27a6bae9-e605-41e4-86ef-8e2ff40dc05d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2117536478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "524f14821876408ab47b277081b145d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9edc8a0b-76", "ovs_interfaceid": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.670186] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Acquired lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.670369] env[68437]: DEBUG nova.network.neutron [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Refreshing network info cache for port 9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 821.671574] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:19:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9edc8a0b-761d-4911-904e-9cb4a163bf7e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.679117] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 
tempest-ServersTestBootFromVolume-937193156-project-member] Creating folder: Project (524f14821876408ab47b277081b145d7). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.680044] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13a2338e-6b7a-4e32-8d88-970ddee0806a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.693782] env[68437]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 821.693940] env[68437]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68437) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 821.694273] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Folder already exists: Project (524f14821876408ab47b277081b145d7). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 821.694467] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Creating folder: Instances. Parent ref: group-v590909. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.694722] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ab2a0ba-67bc-4e88-9edc-7d2a124b6ad0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.704093] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Created folder: Instances in parent group-v590909. [ 821.704324] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.704517] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.704720] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac722d72-a099-448e-b928-61135f745072 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.723244] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.723244] env[68437]: value = "task-2944076" [ 821.723244] env[68437]: _type = "Task" [ 821.723244] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.734108] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944076, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.900022] env[68437]: DEBUG oslo_vmware.api [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944073, 'name': ReconfigVM_Task, 'duration_secs': 0.178859} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.900022] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590966', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'name': 'volume-078ffb5e-17fc-4576-9e94-9314af92b778', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cf394b0b-cb14-4ae1-81bb-622c951bfdab', 'attached_at': '', 'detached_at': '', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'serial': '078ffb5e-17fc-4576-9e94-9314af92b778'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 821.918124] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.923061] env[68437]: INFO nova.compute.manager [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Took 57.95 seconds to build instance. 
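The entries above and below all follow the same driver pattern: an oslo_vmware call kicks off a vCenter task (SearchDatastore_Task, ReconfigVM_Task, PowerOnVM_Task, CreateVM_Task, CopyVirtualDisk_Task, RelocateVM_Task), and wait_for_task then polls it through _poll_task, logging the reported progress percentage until the task completes or faults. The Python sketch below is a minimal illustration of that polling loop only; TaskInfo, get_task_info and the fake progress table are hypothetical stand-ins introduced for the example and are not the oslo.vmware API.

    # Minimal, illustrative sketch of the task-polling loop that produces the
    # "Waiting for the task", "progress is N%" and "completed successfully"
    # entries in this log. TaskInfo, get_task_info and _FAKE_PROGRESS are
    # hypothetical stand-ins, not the real oslo.vmware implementation.
    import time
    from dataclasses import dataclass

    POLL_INTERVAL = 0.5                    # seconds between polls; assumed value
    _FAKE_PROGRESS: dict[str, int] = {}    # fake backend state so the sketch runs standalone


    @dataclass
    class TaskInfo:
        state: str                # "running", "success" or "error"
        progress: int             # 0-100, as reported by vCenter
        error: str | None = None


    def get_task_info(task_ref: str) -> TaskInfo:
        # Hypothetical stand-in for fetching TaskInfo from vCenter: each call
        # advances the fake task by 25% so wait_for_task() terminates.
        pct = _FAKE_PROGRESS.get(task_ref, 0) + 25
        _FAKE_PROGRESS[task_ref] = pct
        state = "success" if pct >= 100 else "running"
        return TaskInfo(state=state, progress=min(pct, 100))


    def wait_for_task(task_ref: str) -> TaskInfo:
        # Poll until the task reaches a terminal state, logging progress the
        # way the surrounding log does for ReconfigVM_Task, PowerOnVM_Task, etc.
        print(f"Waiting for the task: {task_ref} to complete.")
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                print(f"Task {task_ref} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            print(f"Task {task_ref} progress is {info.progress}%.")
            time.sleep(POLL_INTERVAL)


    if __name__ == "__main__":
        wait_for_task("task-2944071")   # task id borrowed from the log for flavor

Running the sketch prints the same shape of output seen in these entries; in the log only the task reference and the calling request context change between operations.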
[ 821.974118] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.974412] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.974586] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.974774] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.974919] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.975079] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.975325] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.975898] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.975898] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 821.976041] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.976174] env[68437]: DEBUG nova.virt.hardware [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.978464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4675a001-742d-4d73-8f94-c055793f29a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.987317] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeda717-9a37-4502-b469-6f4a8b5aece9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.114116] env[68437]: DEBUG nova.scheduler.client.report [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.133881] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523fa1eb-2bcc-8e42-de4e-1416b13dcee4, 'name': SearchDatastore_Task, 'duration_secs': 0.026001} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.134352] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.134635] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.135011] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25169c66-4b9b-4b3d-a384-d64490961753 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.141242] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 822.141242] env[68437]: value = "task-2944077" [ 822.141242] env[68437]: _type = "Task" [ 822.141242] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.149889] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.233471] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944076, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.377898] env[68437]: DEBUG nova.compute.manager [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Received event network-changed {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 822.378152] env[68437]: DEBUG nova.compute.manager [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Refreshing instance network info cache due to event network-changed. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 822.378303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] Acquiring lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.378452] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] Acquired lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.378612] env[68437]: DEBUG nova.network.neutron [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 822.428137] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b5f32e1c-fb87-42d2-a648-4a6958c1f0e4 tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.587s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.513778] env[68437]: DEBUG nova.network.neutron [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updated VIF entry in instance network info cache for port 9edc8a0b-761d-4911-904e-9cb4a163bf7e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 822.514466] env[68437]: DEBUG nova.network.neutron [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating instance_info_cache with network_info: [{"id": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "address": "fa:16:3e:a6:19:7e", "network": {"id": "27a6bae9-e605-41e4-86ef-8e2ff40dc05d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2117536478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "524f14821876408ab47b277081b145d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9edc8a0b-76", "ovs_interfaceid": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.619938] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.620577] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 822.626293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.498s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.628949] env[68437]: INFO nova.compute.claims [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.652751] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500871} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.652751] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.652922] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.653961] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf54b7bc-1cb5-445d-85d0-cbf04fa86bfd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.662021] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 822.662021] env[68437]: value = "task-2944078" [ 822.662021] env[68437]: _type = "Task" [ 822.662021] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.668164] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944078, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.704197] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Successfully updated port: 8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.736509] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944076, 'name': CreateVM_Task, 'duration_secs': 0.568069} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.737300] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.737991] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '7066bc87-d951-4801-86da-a9ec8e4791ac', 'device_type': None, 'mount_device': '/dev/sda', 'boot_index': 0, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590922', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'name': 'volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29e9555b-f928-43e7-a3a3-869ed07d7326', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'serial': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7'}, 'delete_on_termination': True, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68437) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 822.738299] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Root volume attach. Driver type: vmdk {{(pid=68437) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 822.739251] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f775820-ecf1-4185-8538-a86db988ce95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.748215] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d13c281-ec94-43e8-8a10-0205fbaec8ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.753988] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d0c208-ada8-486a-a54d-775d5d346cc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.761286] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-dfea4778-b5fd-442d-b61b-840ad23004dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.766263] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 822.766263] env[68437]: value = "task-2944079" [ 822.766263] env[68437]: _type = "Task" [ 822.766263] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.773570] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944079, 'name': RelocateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.934509] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 823.018414] env[68437]: DEBUG oslo_concurrency.lockutils [req-6af0da4e-ffd7-473e-b145-0cf8184a9f0d req-9b586f94-6207-4c6d-be7a-fe6b4e2f5bb4 service nova] Releasing lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.089318] env[68437]: DEBUG nova.objects.instance [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lazy-loading 'flavor' on Instance uuid cf394b0b-cb14-4ae1-81bb-622c951bfdab {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.103765] env[68437]: DEBUG nova.network.neutron [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updating instance_info_cache with network_info: [{"id": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "address": "fa:16:3e:94:aa:20", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.104", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e6fed15-cc", "ovs_interfaceid": "6e6fed15-cc65-4f1f-9bf5-6854202b2ad0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.127560] env[68437]: DEBUG nova.compute.utils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 823.132348] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: 
acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 823.132348] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 823.170852] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068068} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.171390] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.172981] env[68437]: DEBUG nova.policy [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '915414e3be8a4311a96513aa2fec7053', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ae9811689c645a7af2096a600ed6e1e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 823.174992] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15d3829-a02a-4119-ae6b-92d1a3efb3a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.198539] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.198934] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9f1b696-0b3a-4647-8095-81413843a23b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.214905] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.215072] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquired lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.215266] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 823.224602] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 823.224602] env[68437]: value = "task-2944080" [ 823.224602] env[68437]: _type = "Task" [ 823.224602] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.234619] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.279760] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944079, 'name': RelocateVM_Task, 'duration_secs': 0.419842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.280123] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 823.280389] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590922', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'name': 'volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29e9555b-f928-43e7-a3a3-869ed07d7326', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'serial': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 823.281160] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e09e83-ddaa-49c7-bbd7-15e69cc4321f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.296124] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73c6d98-3767-4e70-af1a-03452a0e0659 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.317477] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7/volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.318747] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f5148d9-f24c-4bea-a225-8d7f23cf8872 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.331932] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.332195] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.332400] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock 
"b92efa60-ef18-4578-b00d-6a2438e7eacf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.332579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.332746] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.334853] env[68437]: INFO nova.compute.manager [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Terminating instance [ 823.340401] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 823.340401] env[68437]: value = "task-2944081" [ 823.340401] env[68437]: _type = "Task" [ 823.340401] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.351144] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.445358] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.445753] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.445978] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.446559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.446797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.453390] env[68437]: INFO nova.compute.manager [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Terminating instance [ 823.494614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.594860] env[68437]: DEBUG oslo_concurrency.lockutils [None req-288e4058-2eff-4116-96f1-123cc4a2b589 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.474s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.601141] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Successfully created port: 9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.606128] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60d20874-2509-4d3c-9486-6a922e0015d5 tempest-ServerExternalEventsTest-632306964 tempest-ServerExternalEventsTest-632306964-project] Releasing lock "refresh_cache-c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.635712] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 823.743034] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944080, 'name': ReconfigVM_Task, 'duration_secs': 0.349733} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.743034] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.743034] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9545f5a0-0cd4-499b-8b76-c70a2a64dbe5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.748906] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 823.748906] env[68437]: value = "task-2944082" [ 823.748906] env[68437]: _type = "Task" [ 823.748906] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.757179] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944082, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.786638] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 823.813263] env[68437]: DEBUG nova.compute.manager [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Received event network-vif-plugged-8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 823.813542] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Acquiring lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.813732] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.814025] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.814152] env[68437]: DEBUG nova.compute.manager [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] No waiting events found dispatching network-vif-plugged-8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 823.814359] env[68437]: WARNING nova.compute.manager [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Received unexpected event network-vif-plugged-8aea055c-08c3-4b2a-ba4e-4aa831098aff for instance with vm_state building and task_state spawning. [ 823.814562] env[68437]: DEBUG nova.compute.manager [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Received event network-changed-8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 823.814848] env[68437]: DEBUG nova.compute.manager [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Refreshing instance network info cache due to event network-changed-8aea055c-08c3-4b2a-ba4e-4aa831098aff. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 823.816850] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Acquiring lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.838690] env[68437]: DEBUG nova.compute.manager [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.838943] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.839872] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fa37dc-ea76-4a61-830d-b379dadc3bb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.855070] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.855070] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.857682] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1911d216-fa28-434e-869b-e983a84d9bdf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.867020] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 823.867020] env[68437]: value = "task-2944083" [ 823.867020] env[68437]: _type = "Task" [ 823.867020] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.875558] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2944083, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.960316] env[68437]: DEBUG nova.compute.manager [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.960742] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.962154] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f82bda-5953-4b1d-ad4b-e5a624986b91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.972995] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.976558] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ecaf864-eb86-4a06-bce8-2d61e8f3a4e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.984073] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 823.984073] env[68437]: value = "task-2944084" [ 823.984073] env[68437]: _type = "Task" [ 823.984073] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.996303] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.033375] env[68437]: DEBUG nova.network.neutron [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updating instance_info_cache with network_info: [{"id": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "address": "fa:16:3e:69:26:1a", "network": {"id": "31d7b9e2-4243-4a9f-bce5-3124599a9ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2048017741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc43ad02f60c41178dc8b891b605843d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aea055c-08", "ovs_interfaceid": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.260039] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944082, 'name': Rename_Task, 'duration_secs': 0.141914} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.260338] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.260588] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4517d5c-e42e-49ca-be1c-60f94d227139 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.267608] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 824.267608] env[68437]: value = "task-2944085" [ 824.267608] env[68437]: _type = "Task" [ 824.267608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.279215] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944085, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.335386] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00011574-a649-43dd-b96f-27659ef10201 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.349962] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a184a053-953d-4ff6-bd82-1011bf2f1ce0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.358494] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944081, 'name': ReconfigVM_Task, 'duration_secs': 0.678725} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.384935] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Reconfigured VM instance instance-0000002a to attach disk [datastore1] volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7/volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.390338] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dab99fea-a21d-48bc-8c3f-3d240e27101d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.403611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fd7cd0-4f7b-4d14-8959-d573b4f94925 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.411102] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 824.411102] env[68437]: value = "task-2944086" [ 824.411102] env[68437]: _type = "Task" [ 824.411102] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.415744] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2944083, 'name': PowerOffVM_Task, 'duration_secs': 0.215869} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.419684] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d91a6d-2119-4e71-bf0d-d93d8bbce02a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.423710] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.423887] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.424140] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a30bdcd2-8eaa-46cc-b273-a88e714f5aa5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.431559] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.440315] env[68437]: DEBUG nova.compute.provider_tree [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.489948] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.490263] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.490378] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Deleting the datastore file [datastore1] b92efa60-ef18-4578-b00d-6a2438e7eacf {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.490993] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83b37b33-e958-418d-b619-7d7fd428461d {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.495823] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944084, 'name': PowerOffVM_Task, 'duration_secs': 0.32682} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.496994] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.497183] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.497604] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for the task: (returnval){ [ 824.497604] env[68437]: value = "task-2944088" [ 824.497604] env[68437]: _type = "Task" [ 824.497604] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.497770] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b87fe82-621b-489f-aa93-d2c09acda61d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.507009] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2944088, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.535238] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.535499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.537122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Releasing lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.537432] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Instance network_info: |[{"id": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "address": "fa:16:3e:69:26:1a", "network": {"id": "31d7b9e2-4243-4a9f-bce5-3124599a9ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2048017741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc43ad02f60c41178dc8b891b605843d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aea055c-08", "ovs_interfaceid": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 824.537710] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Acquired lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.537894] env[68437]: DEBUG nova.network.neutron [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Refreshing network info cache for port 8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2065}} [ 824.540139] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:26:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '778b9a40-d603-4765-ac88-bd6d42c457a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8aea055c-08c3-4b2a-ba4e-4aa831098aff', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.550264] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Creating folder: Project (fc43ad02f60c41178dc8b891b605843d). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.554173] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55d488e9-f6d8-4b5b-9949-a0f31c192707 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.566645] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Created folder: Project (fc43ad02f60c41178dc8b891b605843d) in parent group-v590848. [ 824.566927] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Creating folder: Instances. Parent ref: group-v590972. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.567234] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6f98b9a-7ba9-4b56-aec6-2647c9ce2a61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.575111] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.575407] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.575736] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Deleting the datastore file [datastore1] c5af19d6-5534-45e6-8c9c-dacf30d4fb1a {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.575937] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fa531f4-5437-4c94-95a8-110eafb09eeb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.580015] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Created folder: Instances in parent group-v590972. [ 824.580266] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.580489] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.580708] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5254fa58-6222-466a-83fb-5ebbef6525a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.599129] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for the task: (returnval){ [ 824.599129] env[68437]: value = "task-2944092" [ 824.599129] env[68437]: _type = "Task" [ 824.599129] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.606223] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.606223] env[68437]: value = "task-2944093" [ 824.606223] env[68437]: _type = "Task" [ 824.606223] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.612478] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.616883] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944093, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.647034] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 824.692894] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 824.693169] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.693321] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 824.693507] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.693652] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 824.693824] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 824.694024] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 824.694171] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 824.694345] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 824.694522] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 824.694764] env[68437]: DEBUG nova.virt.hardware [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 824.695824] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded0724c-9e08-445d-a041-c1075a20ec5a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.705438] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6780aa-ee8d-4d80-b54a-6ae8145ab260 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.780605] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944085, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.925118] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944086, 'name': ReconfigVM_Task, 'duration_secs': 0.13684} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.927989] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590922', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'name': 'volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29e9555b-f928-43e7-a3a3-869ed07d7326', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'serial': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 824.928593] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5c97062-9d0c-409f-88de-f3fd081ab9e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.935482] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 824.935482] env[68437]: value = "task-2944094" [ 824.935482] env[68437]: _type = "Task" [ 824.935482] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.945148] env[68437]: DEBUG nova.scheduler.client.report [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.948510] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944094, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.009640] env[68437]: DEBUG oslo_vmware.api [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Task: {'id': task-2944088, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172045} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.009912] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.011272] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.011607] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.011901] env[68437]: INFO nova.compute.manager [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Took 1.17 seconds to destroy the instance on the hypervisor. [ 825.012338] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.016550] env[68437]: DEBUG nova.compute.manager [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.016733] env[68437]: DEBUG nova.network.neutron [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 825.051865] env[68437]: INFO nova.compute.manager [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Detaching volume 078ffb5e-17fc-4576-9e94-9314af92b778 [ 825.086921] env[68437]: DEBUG nova.network.neutron [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updated VIF entry in instance network info cache for port 8aea055c-08c3-4b2a-ba4e-4aa831098aff. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 825.090952] env[68437]: DEBUG nova.network.neutron [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updating instance_info_cache with network_info: [{"id": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "address": "fa:16:3e:69:26:1a", "network": {"id": "31d7b9e2-4243-4a9f-bce5-3124599a9ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2048017741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc43ad02f60c41178dc8b891b605843d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aea055c-08", "ovs_interfaceid": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.110179] env[68437]: DEBUG oslo_vmware.api [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Task: {'id': task-2944092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176362} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.115369] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.115582] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.115769] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.115948] env[68437]: INFO nova.compute.manager [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Took 1.16 seconds to destroy the instance on the hypervisor. 
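The instance_info_cache entry logged just above is a deeply nested structure; when reading such entries the useful fields are usually the port id, MAC address, fixed IPs, segmentation id and tap device name. An illustrative snippet that pulls those out of a trimmed copy of the entry above (only the fields used below are kept; this is not Nova code):

    # Trimmed copy of the VIF entry logged above, keeping only the fields used below.
    vif = {
        "id": "8aea055c-08c3-4b2a-ba4e-4aa831098aff",
        "address": "fa:16:3e:69:26:1a",
        "network": {"subnets": [{"cidr": "192.168.128.0/28",
                                 "ips": [{"address": "192.168.128.3"}]}]},
        "details": {"segmentation_id": 114},
        "devname": "tap8aea055c-08",
    }

    # Collect every fixed IP across all subnets of the VIF.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]

    print(vif["id"], vif["address"], fixed_ips,
          vif["details"]["segmentation_id"], vif["devname"])
    # -> 8aea055c-... fa:16:3e:69:26:1a ['192.168.128.3'] 114 tap8aea055c-08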
[ 825.116216] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.117194] env[68437]: INFO nova.virt.block_device [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Attempting to driver detach volume 078ffb5e-17fc-4576-9e94-9314af92b778 from mountpoint /dev/sdb [ 825.117409] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 825.117588] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590966', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'name': 'volume-078ffb5e-17fc-4576-9e94-9314af92b778', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cf394b0b-cb14-4ae1-81bb-622c951bfdab', 'attached_at': '', 'detached_at': '', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'serial': '078ffb5e-17fc-4576-9e94-9314af92b778'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 825.117878] env[68437]: DEBUG nova.compute.manager [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.117939] env[68437]: DEBUG nova.network.neutron [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 825.120170] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c17ffa-f280-4fea-87ca-cfc3bce39343 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.147765] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3607a099-87b9-4078-b5b8-8f95bc829728 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.150624] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944093, 'name': CreateVM_Task, 'duration_secs': 0.492969} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.150791] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.151819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.151984] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.152319] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 825.152558] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4c816eb-2145-4637-9088-f51e14e552a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.156543] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8acaa9f-fc3a-4e0a-8fc3-77dd7d9cbe55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.159796] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 825.159796] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931b51-9574-0dab-d942-02f8e9dae62d" [ 825.159796] env[68437]: _type = "Task" [ 825.159796] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.182145] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b67284b-87c5-4876-bd14-5a86f01a2bbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.189139] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52931b51-9574-0dab-d942-02f8e9dae62d, 'name': SearchDatastore_Task, 'duration_secs': 0.018573} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.189389] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.189723] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.189921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.190087] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.190270] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.190609] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-460bde01-1a13-4a28-a526-c57a0e0de42e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.205257] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] The volume has not been displaced from its original location: [datastore1] volume-078ffb5e-17fc-4576-9e94-9314af92b778/volume-078ffb5e-17fc-4576-9e94-9314af92b778.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 825.210704] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfiguring VM instance instance-00000011 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 825.211553] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82dbfe78-7cd6-4a14-81de-911cc332ec60 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.225179] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.226022] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.226514] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb34d83-d60b-49eb-be46-ae955ee39775 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.231451] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 825.231451] env[68437]: value = "task-2944095" [ 825.231451] env[68437]: _type = "Task" [ 825.231451] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.232634] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 825.232634] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523aeff3-ed08-467d-8833-fdd230370c35" [ 825.232634] env[68437]: _type = "Task" [ 825.232634] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.244055] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523aeff3-ed08-467d-8833-fdd230370c35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.247278] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944095, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.278647] env[68437]: DEBUG oslo_vmware.api [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944085, 'name': PowerOnVM_Task, 'duration_secs': 0.537188} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.278912] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.279136] env[68437]: INFO nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Took 8.88 seconds to spawn the instance on the hypervisor. [ 825.279319] env[68437]: DEBUG nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.280107] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07670517-8044-4185-aafa-8b275e5c036d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.446395] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944094, 'name': Rename_Task, 'duration_secs': 0.253911} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.446643] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.446917] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20f31c79-97c2-46de-b900-45b8f73e632c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.450633] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.451213] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 825.455140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.589s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.458020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.461339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.376s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.463129] env[68437]: DEBUG nova.objects.instance [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 825.464336] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 825.464336] env[68437]: value = "task-2944096" [ 825.464336] env[68437]: _type = "Task" [ 825.464336] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.476796] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944096, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.543323] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Successfully updated port: 9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.585015] env[68437]: INFO nova.scheduler.client.report [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted allocations for instance e3855111-7678-42c5-a37e-25e8587416aa [ 825.590189] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdd94851-cf21-4a99-b44b-a4f763c1d863 req-9d76d963-58ee-4d8d-ad26-b0eee5a8bc31 service nova] Releasing lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.705465] env[68437]: DEBUG nova.compute.manager [req-70b1ff46-b331-4a74-aa63-3028aa737fe2 req-3ce37f8e-478f-4130-a508-00e2f320ba3f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Received event network-vif-deleted-6e6fed15-cc65-4f1f-9bf5-6854202b2ad0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 825.705465] env[68437]: INFO nova.compute.manager [req-70b1ff46-b331-4a74-aa63-3028aa737fe2 req-3ce37f8e-478f-4130-a508-00e2f320ba3f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Neutron deleted interface 6e6fed15-cc65-4f1f-9bf5-6854202b2ad0; detaching it from the instance and deleting it from the info cache [ 825.705626] env[68437]: DEBUG nova.network.neutron [req-70b1ff46-b331-4a74-aa63-3028aa737fe2 req-3ce37f8e-478f-4130-a508-00e2f320ba3f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.747287] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944095, 'name': ReconfigVM_Task, 'duration_secs': 0.372689} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.751595] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Reconfigured VM instance instance-00000011 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 825.757026] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523aeff3-ed08-467d-8833-fdd230370c35, 'name': SearchDatastore_Task, 'duration_secs': 0.02324} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.757026] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2e78fbb-a4da-41cf-957b-2520d4bb9838 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.769059] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13237669-03d5-4837-8320-d3c6056e53c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.777120] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 825.777120] env[68437]: value = "task-2944097" [ 825.777120] env[68437]: _type = "Task" [ 825.777120] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.777120] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 825.777120] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523c524e-2cf0-fe2d-ae9c-a6c39d1113e0" [ 825.777120] env[68437]: _type = "Task" [ 825.777120] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.788568] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c524e-2cf0-fe2d-ae9c-a6c39d1113e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.796486] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944097, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.800889] env[68437]: INFO nova.compute.manager [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Took 54.20 seconds to build instance. 
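The recurring "Waiting for the task: (returnval){...} to complete", "progress is N%" and "completed successfully" entries above (task-2944095, task-2944097, the SearchDatastore_Task waits) all come from the same poll-until-done loop around vCenter tasks. A self-contained sketch of that pattern, assuming a hypothetical get_task_info callable; this is not the oslo_vmware implementation:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task-info callable until the task succeeds or errors."""
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 33}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # Tiny usage example with a fake task that finishes on the third poll.
    states = iter([{'state': 'running', 'progress': 5},
                   {'state': 'running', 'progress': 71},
                   {'state': 'success', 'result': 'task-2944094'}])
    print(wait_for_task(lambda: next(states), poll_interval=0))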
[ 825.859222] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Received event network-vif-plugged-9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 825.859343] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Acquiring lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.859546] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.859714] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.859988] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] No waiting events found dispatching network-vif-plugged-9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.860747] env[68437]: WARNING nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Received unexpected event network-vif-plugged-9e2a02d6-0496-4807-b04b-bcedf775cfa4 for instance with vm_state building and task_state spawning. [ 825.860747] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Received event network-changed-9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 825.860747] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Refreshing instance network info cache due to event network-changed-9e2a02d6-0496-4807-b04b-bcedf775cfa4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 825.860747] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Acquiring lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.860747] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Acquired lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.860956] env[68437]: DEBUG nova.network.neutron [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Refreshing network info cache for port 9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 825.970180] env[68437]: DEBUG nova.compute.utils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 825.971653] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 825.972116] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 825.984491] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944096, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.026715] env[68437]: DEBUG nova.policy [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8a11819f91e486b86a9cc41c1fd7ec5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b002244273f41d89ddf47570ffe6a02', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 826.045494] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.095036] env[68437]: DEBUG oslo_concurrency.lockutils [None req-15d02d76-d6f1-4eee-90fd-f7d6f9bc07a9 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "e3855111-7678-42c5-a37e-25e8587416aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.776s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.152908] env[68437]: DEBUG nova.network.neutron [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.209662] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83ccd555-fca4-41ad-9460-ba7f3d13141f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.221503] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c98e81-0672-4f3c-b92f-cc6f9814d5cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.263275] env[68437]: DEBUG nova.compute.manager [req-70b1ff46-b331-4a74-aa63-3028aa737fe2 req-3ce37f8e-478f-4130-a508-00e2f320ba3f service nova] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Detach interface failed, port_id=6e6fed15-cc65-4f1f-9bf5-6854202b2ad0, reason: Instance c5af19d6-5534-45e6-8c9c-dacf30d4fb1a could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 826.277057] env[68437]: DEBUG nova.network.neutron [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.289875] env[68437]: DEBUG oslo_vmware.api [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944097, 'name': ReconfigVM_Task, 'duration_secs': 0.148562} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.293273] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590966', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'name': 'volume-078ffb5e-17fc-4576-9e94-9314af92b778', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cf394b0b-cb14-4ae1-81bb-622c951bfdab', 'attached_at': '', 'detached_at': '', 'volume_id': '078ffb5e-17fc-4576-9e94-9314af92b778', 'serial': '078ffb5e-17fc-4576-9e94-9314af92b778'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 826.296366] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c524e-2cf0-fe2d-ae9c-a6c39d1113e0, 'name': SearchDatastore_Task, 'duration_secs': 0.014262} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.299171] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.299171] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8/7ae346fa-fbb2-4fd7-b620-f0dda8243ca8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.299171] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9b70e17-53f7-4026-ae69-7e1be287a2ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.302942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96f8de8c-7bb0-404f-b55e-9e5594ff074c tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.883s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.309350] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 826.309350] env[68437]: value = "task-2944098" [ 826.309350] env[68437]: _type = "Task" [ 826.309350] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.317597] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.415876] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Successfully created port: d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 826.433204] env[68437]: DEBUG nova.network.neutron [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 826.476138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6afe4016-48bb-4fb3-ac6b-f79e536a31e9 tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.479206] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.771s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.481400] env[68437]: INFO nova.compute.claims [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.484489] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 826.492874] env[68437]: DEBUG oslo_vmware.api [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944096, 'name': PowerOnVM_Task, 'duration_secs': 0.82604} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.493354] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.497019] env[68437]: INFO nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Took 5.58 seconds to spawn the instance on the hypervisor. [ 826.497019] env[68437]: DEBUG nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.497019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e92202-b4b4-4013-80a5-36ff15224998 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.612336] env[68437]: DEBUG nova.network.neutron [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.655753] env[68437]: INFO nova.compute.manager [-] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Took 1.54 seconds to deallocate network for instance. [ 826.786026] env[68437]: INFO nova.compute.manager [-] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Took 1.77 seconds to deallocate network for instance. [ 826.807571] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.827023] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944098, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.877508] env[68437]: DEBUG nova.objects.instance [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lazy-loading 'flavor' on Instance uuid cf394b0b-cb14-4ae1-81bb-622c951bfdab {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 827.013346] env[68437]: INFO nova.compute.manager [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Took 48.74 seconds to build instance. 
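The sequence that starts with "Received event network-vif-plugged-9e2a02d6-..." above and ends with the empty cache refresh for acbf4c5c just before this point is the external-events flow: Neutron posts the event to Nova, Nova pops any waiter registered for it under the per-instance events lock, and because nothing is waiting while the instance is still building, the event is logged as unexpected and the network info cache is simply refreshed. A toy sketch of the register/pop pattern (hypothetical InstanceEvents class, not Nova's implementation):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy version of the waiter registry the entries above exercise."""
        def __init__(self):
            self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            # Register a waiter before triggering the operation that emits the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # Return the waiter for this event, or None if nobody registered one.
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    events = InstanceEvents()
    waiter = events.pop("acbf4c5c-341c-4ebd-ad29-90ebf531aa86",
                        "network-vif-plugged-9e2a02d6-0496-4807-b04b-bcedf775cfa4")
    if waiter is None:
        print("No waiting events found; treating the event as unexpected")
    else:
        waiter.set()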
[ 827.117997] env[68437]: DEBUG oslo_concurrency.lockutils [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] Releasing lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.117997] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Received event network-vif-deleted-9457e907-17df-45cc-b8da-a57bf9901e34 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 827.117997] env[68437]: INFO nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Neutron deleted interface 9457e907-17df-45cc-b8da-a57bf9901e34; detaching it from the instance and deleting it from the info cache [ 827.117997] env[68437]: DEBUG nova.network.neutron [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.117997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.118313] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 827.137038] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "f1230046-d368-40ee-b1fa-99df4ab15a10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.137296] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.140023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.140023] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.140023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.141038] env[68437]: INFO nova.compute.manager [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Terminating instance [ 827.162846] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.296580] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.324790] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651746} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.329552] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8/7ae346fa-fbb2-4fd7-b620-f0dda8243ca8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.329775] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.330324] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d30523e-cf36-4963-aa31-09054802d7e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.343842] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 827.343842] env[68437]: value = "task-2944099" [ 827.343842] env[68437]: _type = "Task" [ 827.343842] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.355474] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944099, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.361442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.500244] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 827.515012] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e82728a9-784b-4d34-a239-b77b277e32d8 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.132s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.555071] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 827.555340] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.555497] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 827.555675] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.555825] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 827.555972] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 827.556876] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 827.557010] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 827.557196] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 827.557375] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 827.557551] env[68437]: DEBUG nova.virt.hardware [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 827.558465] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1836dec-3e4a-4899-aac2-b1e5b0c3fbf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.569853] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ff77aa-fd6b-4ceb-abed-95b9b39937bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.625720] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acaaab91-bd1a-4e28-8222-15cb884dbfaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.634820] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245f0e91-d9ba-4769-848d-cffbb984a889 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.648484] env[68437]: DEBUG nova.compute.manager [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 827.648750] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.650407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c249cadb-e941-492b-8041-731dab4e8ba5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.661364] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.661364] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa6f399f-d0a3-4d0b-abde-92f08fb89a57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.680397] env[68437]: DEBUG nova.compute.manager [req-90504e53-578c-4a29-b8bb-69f7e326b231 req-81f72a7f-7106-4470-a20e-71e1bfcd0dea service nova] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Detach interface failed, port_id=9457e907-17df-45cc-b8da-a57bf9901e34, reason: Instance b92efa60-ef18-4578-b00d-6a2438e7eacf could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 827.683552] env[68437]: DEBUG oslo_vmware.api [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 827.683552] env[68437]: value = "task-2944100" [ 827.683552] env[68437]: _type = "Task" [ 827.683552] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.689090] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 827.699119] env[68437]: DEBUG oslo_vmware.api [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.858593] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071187} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.862460] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.864111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c49ae89-3e7d-4d7b-87a5-cc2e1dd9c1a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.896197] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8/7ae346fa-fbb2-4fd7-b620-f0dda8243ca8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.899385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6247ff07-1cab-469d-8e48-03f7ce4e605f tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.364s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.900417] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44a31953-9df2-473b-945f-bc932262cd7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.927245] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 827.927245] env[68437]: value = "task-2944101" [ 827.927245] env[68437]: _type = "Task" [ 827.927245] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.934323] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.021132] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 828.046602] env[68437]: DEBUG nova.network.neutron [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [{"id": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "address": "fa:16:3e:f2:e2:7c", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2a02d6-04", "ovs_interfaceid": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.143256] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Successfully updated port: d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 828.154506] env[68437]: DEBUG nova.compute.manager [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Received event network-changed-4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 828.154728] env[68437]: DEBUG nova.compute.manager [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Refreshing instance network info cache due to event network-changed-4ec75d03-3ee1-480d-ab6a-acc211fd6bae. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 828.155061] env[68437]: DEBUG oslo_concurrency.lockutils [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.155168] env[68437]: DEBUG oslo_concurrency.lockutils [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.155338] env[68437]: DEBUG nova.network.neutron [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Refreshing network info cache for port 4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 828.197559] env[68437]: DEBUG oslo_vmware.api [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944100, 'name': PowerOffVM_Task, 'duration_secs': 0.446535} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.197559] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.197559] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.197559] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5be6b96c-06b2-4192-81de-7f34a9b61c67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.257782] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3ede16-f564-4ecb-bd9e-c9da617c1420 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.265553] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb5d427-69c0-4ba2-bb18-6c1ca378224f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.296336] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa4521e-f4c2-48ac-9ef1-a8d0476e1929 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.303797] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a8a5fc-6de0-45a2-b038-242176f67513 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.317593] env[68437]: DEBUG nova.compute.provider_tree [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.373316] env[68437]: DEBUG nova.compute.manager [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-vif-plugged-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 828.373316] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.373316] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.373316] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.373316] env[68437]: DEBUG nova.compute.manager [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] No waiting events found dispatching network-vif-plugged-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 828.373539] env[68437]: WARNING nova.compute.manager [req-d9a1b04a-04de-4da1-a507-95e36dc73031 req-fc3097ed-2bf7-4565-9810-c183ace7511a service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received unexpected event network-vif-plugged-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 for instance with vm_state building and task_state spawning. [ 828.438243] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944101, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.549692] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.549692] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance network_info: |[{"id": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "address": "fa:16:3e:f2:e2:7c", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2a02d6-04", "ovs_interfaceid": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 828.553067] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:e2:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e2a02d6-0496-4807-b04b-bcedf775cfa4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.563708] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.563708] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.563708] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2e157b4-f847-49b0-b08a-0332eb3d52f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.578032] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.582061] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.582061] env[68437]: value = "task-2944103" [ 828.582061] env[68437]: _type = "Task" [ 828.582061] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.589619] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944103, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.645526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.645947] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.645947] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 828.822116] env[68437]: DEBUG nova.scheduler.client.report [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.936442] env[68437]: DEBUG oslo_vmware.api [None 
req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944101, 'name': ReconfigVM_Task, 'duration_secs': 0.70428} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.936442] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8/7ae346fa-fbb2-4fd7-b620-f0dda8243ca8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.936972] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e804f49d-d892-49af-b378-17013be88005 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.944298] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 828.944298] env[68437]: value = "task-2944104" [ 828.944298] env[68437]: _type = "Task" [ 828.944298] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.953986] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944104, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.093327] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944103, 'name': CreateVM_Task, 'duration_secs': 0.426432} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.093497] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.094303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.094471] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.094789] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 829.095602] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2737a5e7-1fd7-460a-9f40-a87833de3434 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.105078] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 829.105078] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52072df2-a169-6ea4-d2f4-faf63aa4f92a" [ 829.105078] env[68437]: _type = "Task" [ 829.105078] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.120349] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52072df2-a169-6ea4-d2f4-faf63aa4f92a, 'name': SearchDatastore_Task, 'duration_secs': 0.009328} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.120475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.120750] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.121019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.121184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.121361] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.121624] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-564973bf-63ee-46f1-b011-dae111bc8b79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.129979] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.130169] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.130882] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-308cd8fe-6a31-4ea9-90c7-98ee6e4c6508 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.134016] env[68437]: DEBUG nova.network.neutron [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updated VIF entry in instance network info cache for port 4ec75d03-3ee1-480d-ab6a-acc211fd6bae. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 829.134122] env[68437]: DEBUG nova.network.neutron [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.140316] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 829.140316] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0b04c-c50c-ddcc-6195-6d088262410e" [ 829.140316] env[68437]: _type = "Task" [ 829.140316] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.151526] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0b04c-c50c-ddcc-6195-6d088262410e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.179745] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 829.315295] env[68437]: DEBUG nova.network.neutron [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.317909] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 829.318166] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 829.318538] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleting the datastore file [datastore1] f1230046-d368-40ee-b1fa-99df4ab15a10 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 829.318615] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fb99cfc-3d89-4002-b1d1-9a927acb5875 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.326194] env[68437]: DEBUG oslo_vmware.api [None 
req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 829.326194] env[68437]: value = "task-2944105" [ 829.326194] env[68437]: _type = "Task" [ 829.326194] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.328372] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.849s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.328926] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 829.331676] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.125s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.331891] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.334596] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.465s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.335312] env[68437]: INFO nova.compute.claims [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.343487] env[68437]: DEBUG oslo_vmware.api [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.389636] env[68437]: INFO nova.scheduler.client.report [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleted allocations for instance efed858a-44b9-45b7-8778-22183549088c [ 829.454726] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944104, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.637384] env[68437]: DEBUG oslo_concurrency.lockutils [req-b3561349-dd3f-4710-a9bd-555feb5f5d94 req-20642fb9-9d96-4f44-8511-c59124871a21 service nova] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.650256] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0b04c-c50c-ddcc-6195-6d088262410e, 'name': SearchDatastore_Task, 'duration_secs': 0.010189} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.651075] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82c96280-5aa2-4f4e-a523-b674fbda70c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.656500] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 829.656500] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527c2faf-d699-67ef-fb50-66bfe908964a" [ 829.656500] env[68437]: _type = "Task" [ 829.656500] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.663940] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527c2faf-d699-67ef-fb50-66bfe908964a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.818481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.818607] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Instance network_info: |[{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.819019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:0c:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd22dccb0-3e51-43b6-8bfe-4f6b83be5b62', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.826920] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.827229] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.830525] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-161b8524-3530-48a6-be0b-43a7a3c38456 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.847606] env[68437]: DEBUG nova.compute.utils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 829.849464] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 829.849652] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 829.857846] env[68437]: DEBUG oslo_vmware.api [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.426247} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.857846] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.857846] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.857846] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.857846] env[68437]: INFO nova.compute.manager [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Took 2.21 seconds to destroy the instance on the hypervisor. 
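The entries above record the full teardown of instance f1230046-d368-40ee-b1fa-99df4ab15a10: the VM is powered off, unregistered from vCenter, its datastore contents are deleted, and the instance's network is then deallocated, with each long-running vCenter task polled until it reports completion (the repeated "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" entries). What follows is only a rough, self-contained sketch of that power-off / unregister / delete / deallocate ordering and of the wait-until-done polling pattern; it is not Nova's vmops/vm_util code, and FakeSession, its methods, and the canned progress values are hypothetical stand-ins invented for illustration.

import time

# Hypothetical in-memory "vCenter" so the sketch runs standalone; a real driver
# would issue PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task calls
# over the vSphere API and poll TaskInfo instead.
class FakeSession:
    def __init__(self):
        self._tasks = {}

    def start_task(self, name):
        task_id = "task-%d" % (len(self._tasks) + 2944100)
        self._tasks[task_id] = iter([0, 50, 100])   # canned progress values
        print("Invoking %s, got %s" % (name, task_id))
        return task_id

    def task_progress(self, task_id):
        # Exhausted iterator means the fake task is finished.
        return next(self._tasks[task_id], 100)


def wait_for_task(session, task_id, poll_interval=0.1):
    # Mirrors the "Waiting for the task ... to complete" / "progress is N%"
    # entries: poll the task until it reports 100%.
    while True:
        progress = session.task_progress(task_id)
        print("Task %s progress is %d%%" % (task_id, progress))
        if progress >= 100:
            print("Task %s completed successfully." % task_id)
            return
        time.sleep(poll_interval)


def destroy_instance(session, instance_uuid, datastore):
    # Same ordering as the log: power off, unregister, delete the datastore
    # contents, then deallocate the network (stubbed here with a print).
    wait_for_task(session, session.start_task("PowerOffVM_Task"))
    session.start_task("UnregisterVM")  # no task wait; UnregisterVM returns directly
    wait_for_task(session, session.start_task("DeleteDatastoreFile_Task"))
    print("Deleted contents of the VM from datastore %s" % datastore)
    print("Deallocating network for instance %s" % instance_uuid)


if __name__ == "__main__":
    destroy_instance(FakeSession(), "f1230046-d368-40ee-b1fa-99df4ab15a10",
                     "datastore1")

Running the sketch prints a progress/completion trace similar in shape to the task entries above; the real flow in the log simply interleaves these steps with work for other instances on the same compute host.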
[ 829.858188] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.858188] env[68437]: DEBUG nova.compute.manager [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.858188] env[68437]: DEBUG nova.network.neutron [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 829.860366] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.860366] env[68437]: value = "task-2944106" [ 829.860366] env[68437]: _type = "Task" [ 829.860366] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.869707] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944106, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.902587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4438dd72-7b78-4251-acf1-86e1ce3a4e83 tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "efed858a-44b9-45b7-8778-22183549088c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.227s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.918668] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.919483] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.956359] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944104, 'name': Rename_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.967620] env[68437]: DEBUG nova.policy [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17bdd32924094cc9a59a1cb1c27f0c36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4b1c4be5c524504ae9346d2e4ec8008', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 830.166591] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527c2faf-d699-67ef-fb50-66bfe908964a, 'name': SearchDatastore_Task, 'duration_secs': 0.010277} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.166875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.167363] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/acbf4c5c-341c-4ebd-ad29-90ebf531aa86.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.168219] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47f8eaf2-793b-468d-89d8-299473811deb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.175570] env[68437]: DEBUG nova.compute.manager [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Received event network-changed-9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 830.175848] env[68437]: DEBUG nova.compute.manager [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Refreshing instance network info cache due to event network-changed-9edc8a0b-761d-4911-904e-9cb4a163bf7e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 830.176093] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] Acquiring lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.176248] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] Acquired lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.176412] env[68437]: DEBUG nova.network.neutron [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Refreshing network info cache for port 9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 830.179013] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 830.179013] env[68437]: value = "task-2944107" [ 830.179013] env[68437]: _type = "Task" [ 830.179013] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.189205] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.283057] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Successfully created port: 6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.359027] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 830.370625] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944106, 'name': CreateVM_Task, 'duration_secs': 0.390403} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.370991] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.371825] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.372127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.372570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.372928] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e4f79a-41fe-48e7-927f-b1ec46faa3ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.378266] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 830.378266] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529efec4-8d33-4f03-4b81-7aada486575f" [ 830.378266] env[68437]: _type = "Task" [ 830.378266] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.388768] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529efec4-8d33-4f03-4b81-7aada486575f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.455451] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944104, 'name': Rename_Task, 'duration_secs': 1.177412} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.456078] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 830.456663] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d010a80-1a3d-4745-a452-cb45887ef045 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.467811] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 830.467811] env[68437]: value = "task-2944108" [ 830.467811] env[68437]: _type = "Task" [ 830.467811] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.479700] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944108, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.493874] env[68437]: DEBUG nova.compute.manager [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 830.494090] env[68437]: DEBUG nova.compute.manager [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing instance network info cache due to event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 830.494406] env[68437]: DEBUG oslo_concurrency.lockutils [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.494497] env[68437]: DEBUG oslo_concurrency.lockutils [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.494681] env[68437]: DEBUG nova.network.neutron [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 830.694569] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944107, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.790982] env[68437]: DEBUG nova.network.neutron [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.889993] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529efec4-8d33-4f03-4b81-7aada486575f, 'name': SearchDatastore_Task, 'duration_secs': 0.010368} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.890317] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.892797] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.892797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.892797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.892797] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.892797] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1752b92d-a192-49c5-95bb-6c7aaf3337f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.903669] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} 
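The lockutils lines around the devstack-image-cache_base VMDK show the image-cache serialization pattern: acquire a named lock on the cached image path, copy or reuse the disk, then release the lock. The snippet below is only an illustration of that idea using the standard library; Nova actually relies on oslo.concurrency's lockutils, and the helper names here (named_lock, copy_cached_image) are hypothetical.

import shutil
import threading
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


@contextmanager
def named_lock(name):
    """Serialize critical sections that share the same lock name."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    lock.acquire()          # mirrors the "Acquiring lock" / "Acquired lock" lines
    try:
        yield
    finally:
        lock.release()      # mirrors the "Releasing lock" line


def copy_cached_image(cache_path, instance_path):
    # Only one copier may touch a given cached image at a time.
    with named_lock(cache_path):
        shutil.copyfile(cache_path, instance_path)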
[ 830.903857] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.904622] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06ffc71a-0960-4d27-857f-fb418ada0a29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.915582] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 830.915582] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e290bd-9937-0a3e-94ef-a79ecccc1a70" [ 830.915582] env[68437]: _type = "Task" [ 830.915582] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.924479] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e290bd-9937-0a3e-94ef-a79ecccc1a70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.984640] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944108, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.001472] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da827626-224e-4ae5-981b-4a8e3c91b25f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.012803] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac216b0-71c7-4922-8263-bde09c315d17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.053970] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213a5c45-523b-41e5-9e90-fd1be1277dc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.063164] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d25b7a-b0cb-477d-9749-0ac9df8eda35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.076348] env[68437]: DEBUG nova.compute.provider_tree [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.136959] env[68437]: DEBUG nova.network.neutron [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updated VIF entry in instance network info cache for port 9edc8a0b-761d-4911-904e-9cb4a163bf7e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 831.137391] env[68437]: DEBUG nova.network.neutron [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating instance_info_cache with network_info: [{"id": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "address": "fa:16:3e:a6:19:7e", "network": {"id": "27a6bae9-e605-41e4-86ef-8e2ff40dc05d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-2117536478-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "524f14821876408ab47b277081b145d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9edc8a0b-76", "ovs_interfaceid": "9edc8a0b-761d-4911-904e-9cb4a163bf7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.192229] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643195} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.192543] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/acbf4c5c-341c-4ebd-ad29-90ebf531aa86.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.192801] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.193079] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af83331a-7412-4b70-abb4-c60db6a2f90a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.200079] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 831.200079] env[68437]: value = "task-2944109" [ 831.200079] env[68437]: _type = "Task" [ 831.200079] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.208519] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944109, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.242841] env[68437]: DEBUG nova.network.neutron [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updated VIF entry in instance network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 831.243276] env[68437]: DEBUG nova.network.neutron [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.293680] env[68437]: INFO nova.compute.manager [-] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Took 1.44 seconds to deallocate network for instance. [ 831.371657] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 831.425657] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e290bd-9937-0a3e-94ef-a79ecccc1a70, 'name': SearchDatastore_Task, 'duration_secs': 0.028554} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.428114] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:39:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d926657d-4f9c-4856-bc8e-d77db590d34a',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1416034897',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.428681] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.432029] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.432029] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.432029] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.432029] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.432029] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.432357] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.432357] env[68437]: DEBUG 
nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.432357] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.432357] env[68437]: DEBUG nova.virt.hardware [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.432357] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef42186-af30-46e4-96f3-bc89028964b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.435876] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-814aaf71-9f74-450e-8b0a-5a56c1a4cad1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.445635] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c64115-bdc8-46e3-9213-f5ba83ab5d7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.449523] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 831.449523] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521c2a7e-7e1f-5177-c0ef-2955702a4d0c" [ 831.449523] env[68437]: _type = "Task" [ 831.449523] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.467072] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521c2a7e-7e1f-5177-c0ef-2955702a4d0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.475084] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944108, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.582013] env[68437]: DEBUG nova.scheduler.client.report [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.640287] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d5289fb-2d95-4637-8631-7e6383fd1f79 req-fa3fe561-6094-4c2c-bdf0-a805e15234ad service nova] Releasing lock "refresh_cache-29e9555b-f928-43e7-a3a3-869ed07d7326" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.709811] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064999} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.710139] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.710949] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c5bb40-f3d3-4f97-8198-43c1be4960ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.732464] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/acbf4c5c-341c-4ebd-ad29-90ebf531aa86.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.732738] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eee19164-c155-4591-b8d4-25065ed02a33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.747676] env[68437]: DEBUG oslo_concurrency.lockutils [req-c14470a1-340c-4e0c-a955-3f1faf03dff0 req-0ff6828d-d64c-4ccf-b4b1-bf63240fb05c service nova] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.752864] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting 
for the task: (returnval){ [ 831.752864] env[68437]: value = "task-2944110" [ 831.752864] env[68437]: _type = "Task" [ 831.752864] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.760962] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944110, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.802311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.907427] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Successfully updated port: 6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.960426] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521c2a7e-7e1f-5177-c0ef-2955702a4d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.025727} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.960695] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.960948] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a01364f9-e30d-4140-ae41-1e7c4aaa2251.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.961265] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cec8d3e-6727-4f76-8f39-ceaa71899269 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.973214] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 831.973214] env[68437]: value = "task-2944111" [ 831.973214] env[68437]: _type = "Task" [ 831.973214] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.984087] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944108, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.989633] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.089515] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.090289] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 832.094192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.989s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.094499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.097452] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.390s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.101017] env[68437]: INFO nova.compute.claims [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.232470] env[68437]: INFO nova.scheduler.client.report [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Deleted allocations for instance 
5202b708-179c-48d2-9c4e-2bb5ab1a6ebb [ 832.276227] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944110, 'name': ReconfigVM_Task, 'duration_secs': 0.456223} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.276227] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfigured VM instance instance-0000002c to attach disk [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/acbf4c5c-341c-4ebd-ad29-90ebf531aa86.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.276227] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afca11ef-fdf0-4f54-995f-907ea9103c6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.280443] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 832.280443] env[68437]: value = "task-2944112" [ 832.280443] env[68437]: _type = "Task" [ 832.280443] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.290853] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944112, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.410261] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.410261] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.410261] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 832.430750] env[68437]: DEBUG nova.compute.manager [req-d851e50a-8eb8-42a0-8e98-ad1c4b522547 req-69f9eb30-a417-48a8-b9b1-8db6c4ee0290 service nova] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Received event network-vif-deleted-c57512fa-960c-429b-97d3-2d33ecf31cf0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 832.482475] env[68437]: DEBUG oslo_vmware.api [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944108, 'name': PowerOnVM_Task, 'duration_secs': 1.606216} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.482475] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 832.482779] env[68437]: INFO nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Took 10.57 seconds to spawn the instance on the hypervisor. [ 832.482843] env[68437]: DEBUG nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 832.483644] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3313e2de-8b8b-4b0a-8b22-cac338194c9c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.489172] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508537} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.489735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a01364f9-e30d-4140-ae41-1e7c4aaa2251.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.489948] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.490193] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da4bc095-dad6-4aa7-9b8b-aac267bf0a6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.502038] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 832.502038] env[68437]: value = "task-2944113" [ 832.502038] env[68437]: _type = "Task" [ 832.502038] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.510020] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.605613] env[68437]: DEBUG nova.compute.utils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.609470] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.609639] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 832.654078] env[68437]: DEBUG nova.policy [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42280bc8e492437aa17259ace66e1601', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18b5eecfb2734eaf8288932f146e3d5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.741124] env[68437]: DEBUG oslo_concurrency.lockutils [None req-542347fc-3c22-4ef6-bf9d-90b70709045b tempest-ServersListShow298Test-1548744480 tempest-ServersListShow298Test-1548744480-project-member] Lock "5202b708-179c-48d2-9c4e-2bb5ab1a6ebb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.894s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.767019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "45595615-59c0-4c59-b18c-b49a3126dbb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.767019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.767019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.767019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.767346] env[68437]: 
DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.768532] env[68437]: INFO nova.compute.manager [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Terminating instance [ 832.791986] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944112, 'name': Rename_Task, 'duration_secs': 0.252047} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.792449] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 832.793445] env[68437]: DEBUG nova.compute.manager [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Received event network-vif-plugged-6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 832.793634] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Acquiring lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.793830] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.793993] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.794394] env[68437]: DEBUG nova.compute.manager [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] No waiting events found dispatching network-vif-plugged-6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.794472] env[68437]: WARNING nova.compute.manager [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service 
nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Received unexpected event network-vif-plugged-6c053c01-e575-4bdc-93ce-3604fa26d1ee for instance with vm_state building and task_state spawning. [ 832.794633] env[68437]: DEBUG nova.compute.manager [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Received event network-changed-6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 832.794788] env[68437]: DEBUG nova.compute.manager [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Refreshing instance network info cache due to event network-changed-6c053c01-e575-4bdc-93ce-3604fa26d1ee. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 832.794972] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.795178] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa27943a-922d-4e1e-8b92-88106da8daa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.803718] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 832.803718] env[68437]: value = "task-2944114" [ 832.803718] env[68437]: _type = "Task" [ 832.803718] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.815837] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944114, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.958999] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 833.013748] env[68437]: INFO nova.compute.manager [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Took 48.96 seconds to build instance. [ 833.020218] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064531} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.020496] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 833.023156] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1db413-2036-4a32-9351-91bbbfa8e27c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.027647] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Successfully created port: eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.057518] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a01364f9-e30d-4140-ae41-1e7c4aaa2251.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 833.058568] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b268d480-8bfd-48f1-b105-d14836254879 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.087988] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 833.087988] env[68437]: value = "task-2944115" [ 833.087988] env[68437]: _type = "Task" [ 833.087988] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.098946] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.110034] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 833.273506] env[68437]: DEBUG nova.compute.manager [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.273823] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.274812] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1ef1b6-2402-47d5-b5c4-e242db761524 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.283504] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.286432] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32166e2f-2f59-4454-b8cc-598fe8c28f81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.294567] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 833.294567] env[68437]: value = "task-2944116" [ 833.294567] env[68437]: _type = "Task" [ 833.294567] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.303179] env[68437]: DEBUG nova.network.neutron [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.306957] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944116, 'name': 
PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.321046] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944114, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.516581] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c1645b4-fd42-46ca-9da3-5d7d13a35f93 tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.510s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.600061] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944115, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.802243] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.802595] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Instance network_info: |[{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 833.802978] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.803178] env[68437]: 
DEBUG nova.network.neutron [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Refreshing network info cache for port 6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 833.805505] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:4e:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c053c01-e575-4bdc-93ce-3604fa26d1ee', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 833.819221] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 833.828555] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 833.839011] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4dc038e-3ec5-4852-9fec-4e40fc8611e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.868196] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944116, 'name': PowerOffVM_Task, 'duration_secs': 0.387396} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.872798] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.872943] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.875034] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ee9e0b2-3c0a-4a87-8f70-6cdc335a07cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.883909] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944114, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.888493] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 833.888493] env[68437]: value = "task-2944117" [ 833.888493] env[68437]: _type = "Task" [ 833.888493] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.905432] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944117, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.915299] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92657fc2-9d9f-4d30-ba0d-024abf5bd38f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.923796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbab224-4f38-4e13-a369-4e541321e8f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.969174] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015c71a5-b311-48cc-8551-c13e33c3fcc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.973632] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.974024] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.974341] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleting the datastore file [datastore1] 45595615-59c0-4c59-b18c-b49a3126dbb7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.974794] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-832b81f5-b3f2-4430-9b33-eaa016875726 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.985204] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc3588f-501c-43ca-b25a-b7ac9d675544 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.993716] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for the task: (returnval){ [ 833.993716] env[68437]: value = "task-2944119" [ 833.993716] env[68437]: _type = "Task" [ 833.993716] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.007158] env[68437]: DEBUG nova.compute.provider_tree [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.013814] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944119, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.021027] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 834.100246] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944115, 'name': ReconfigVM_Task, 'duration_secs': 0.937725} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.100542] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfigured VM instance instance-0000002d to attach disk [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a01364f9-e30d-4140-ae41-1e7c4aaa2251.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.101237] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-185af65f-3d57-4d86-aedf-b09cf10347a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.109177] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 834.109177] env[68437]: value = "task-2944120" [ 834.109177] env[68437]: _type = "Task" [ 834.109177] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.125471] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944120, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.132496] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.207033] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.211692] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.212037] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.212285] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.212512] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.212740] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.213089] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.213379] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.213610] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.213867] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.214147] env[68437]: DEBUG nova.virt.hardware [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.215549] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0ab802-4520-4895-9857-43aa46613807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.230884] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f10ce3e-1d6a-42c0-81f5-3d3dd83e866e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.315649] env[68437]: DEBUG nova.network.neutron [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updated VIF entry in instance network info cache for port 6c053c01-e575-4bdc-93ce-3604fa26d1ee. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 834.316310] env[68437]: DEBUG nova.network.neutron [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.339695] env[68437]: DEBUG oslo_vmware.api [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944114, 'name': PowerOnVM_Task, 'duration_secs': 1.19508} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.340613] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.340613] env[68437]: INFO nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Took 9.69 seconds to spawn the instance on the hypervisor. [ 834.340613] env[68437]: DEBUG nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.341262] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78f2b7d-1c8a-4998-96c7-60bdab9e7ce8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.399690] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944117, 'name': CreateVM_Task, 'duration_secs': 0.432503} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.400624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 834.401569] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.401833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.402437] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 834.403224] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-076b605b-dfdd-4b97-981b-49fd917defbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.409660] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 834.409660] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526d306d-3bdb-3789-7539-37f933442e4c" [ 834.409660] env[68437]: _type = "Task" [ 834.409660] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.419536] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526d306d-3bdb-3789-7539-37f933442e4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.505416] env[68437]: DEBUG oslo_vmware.api [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Task: {'id': task-2944119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176431} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.505710] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 834.505936] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 834.506139] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.506318] env[68437]: INFO nova.compute.manager [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Took 1.23 seconds to destroy the instance on the hypervisor. [ 834.506564] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.506758] env[68437]: DEBUG nova.compute.manager [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.506856] env[68437]: DEBUG nova.network.neutron [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 834.510807] env[68437]: DEBUG nova.scheduler.client.report [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.575627] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.623271] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944120, 'name': Rename_Task, 'duration_secs': 0.214342} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.623859] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.623859] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-947c623c-4f52-451e-bb63-19b7920e2d67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.630742] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 834.630742] env[68437]: value = "task-2944121" [ 834.630742] env[68437]: _type = "Task" [ 834.630742] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.640999] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944121, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.820279] env[68437]: DEBUG oslo_concurrency.lockutils [req-5f4ea26b-e9a4-47c0-8c5e-a187560792d2 req-9050f4ed-1c68-4c10-ba79-86443862d5a9 service nova] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.823820] env[68437]: DEBUG nova.compute.manager [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Received event network-changed-8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 834.824573] env[68437]: DEBUG nova.compute.manager [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Refreshing instance network info cache due to event network-changed-8aea055c-08c3-4b2a-ba4e-4aa831098aff. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 834.824573] env[68437]: DEBUG oslo_concurrency.lockutils [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] Acquiring lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.824573] env[68437]: DEBUG oslo_concurrency.lockutils [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] Acquired lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.825302] env[68437]: DEBUG nova.network.neutron [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Refreshing network info cache for port 8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 834.858906] env[68437]: INFO nova.compute.manager [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Took 50.78 seconds to build instance. [ 834.921014] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526d306d-3bdb-3789-7539-37f933442e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.012655} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.921339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.921569] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.921805] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.921949] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.922142] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.922410] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44d77d09-356b-425e-aebc-44b1c877e7c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.933933] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.933933] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.934213] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57f33f95-10e9-44f1-818c-d8ca5c83f202 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.945016] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 834.945016] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5297d099-69ca-5a5b-4444-3d2ba223d202" [ 834.945016] env[68437]: _type = "Task" [ 834.945016] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.954102] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297d099-69ca-5a5b-4444-3d2ba223d202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.016602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.919s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.016982] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 835.019937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.923s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.020382] env[68437]: DEBUG nova.objects.instance [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 835.147349] env[68437]: DEBUG oslo_vmware.api [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944121, 'name': PowerOnVM_Task, 'duration_secs': 0.480424} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.150483] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.150483] env[68437]: INFO nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Took 7.65 seconds to spawn the instance on the hypervisor. [ 835.150483] env[68437]: DEBUG nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.150483] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c178ff34-2783-4d34-9e17-ad21c43ea497 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.200882] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Successfully updated port: eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.365397] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72c16fa5-ccdb-4435-a0f5-2397ba208031 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.430s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.458949] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297d099-69ca-5a5b-4444-3d2ba223d202, 'name': SearchDatastore_Task, 'duration_secs': 0.01102} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.459794] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ff06568-2967-4228-a100-ee5a215201f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.465594] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 835.465594] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5250d18d-d598-047d-8345-6153d3208e0c" [ 835.465594] env[68437]: _type = "Task" [ 835.465594] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.473306] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5250d18d-d598-047d-8345-6153d3208e0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.527443] env[68437]: DEBUG nova.compute.utils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 835.528918] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 835.531619] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 835.589391] env[68437]: DEBUG nova.policy [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cb5c1cd965a4825aa6c7727a5ccd481', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.625648] env[68437]: DEBUG nova.network.neutron [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.651458] env[68437]: DEBUG nova.network.neutron [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updated VIF entry in instance network info cache for port 8aea055c-08c3-4b2a-ba4e-4aa831098aff. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 835.651945] env[68437]: DEBUG nova.network.neutron [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updating instance_info_cache with network_info: [{"id": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "address": "fa:16:3e:69:26:1a", "network": {"id": "31d7b9e2-4243-4a9f-bce5-3124599a9ef2", "bridge": "br-int", "label": "tempest-ServersTestJSON-2048017741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc43ad02f60c41178dc8b891b605843d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "778b9a40-d603-4765-ac88-bd6d42c457a2", "external-id": "nsx-vlan-transportzone-114", "segmentation_id": 114, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8aea055c-08", "ovs_interfaceid": "8aea055c-08c3-4b2a-ba4e-4aa831098aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.671496] env[68437]: INFO nova.compute.manager [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Took 46.56 seconds to build instance. [ 835.704295] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.704467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.704621] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 835.868018] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.978023] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5250d18d-d598-047d-8345-6153d3208e0c, 'name': SearchDatastore_Task, 'duration_secs': 0.021582} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.978023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.978023] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.978023] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae44bbed-c997-4b19-8c88-84f108e1bd03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.985169] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 835.985169] env[68437]: value = "task-2944122" [ 835.985169] env[68437]: _type = "Task" [ 835.985169] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.994864] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944122, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.030805] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e3b7f600-ea6f-426f-aa35-e46439671767 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.031959] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.855s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.032257] env[68437]: DEBUG nova.objects.instance [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lazy-loading 'resources' on Instance uuid c74569b8-dfc9-4a74-9d25-74b484bd9477 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.038442] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.129021] env[68437]: INFO nova.compute.manager [-] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Took 1.62 seconds to deallocate network for instance. [ 836.155592] env[68437]: DEBUG oslo_concurrency.lockutils [req-ca3c8ba1-1975-4f5f-8e92-36d3657af8eb req-0d61b2cd-1064-4d60-ab4b-19c6187cc9b8 service nova] Releasing lock "refresh_cache-7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.173480] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07d87af2-7e32-4d3a-804c-dd7a39dfd244 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.442s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.182481] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Successfully created port: 1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.248277] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 836.380486] env[68437]: INFO nova.compute.manager [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Rescuing [ 836.380486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.380486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.380486] env[68437]: DEBUG nova.network.neutron [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 836.415855] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.462451] env[68437]: DEBUG nova.network.neutron [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Updating instance_info_cache with network_info: [{"id": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "address": "fa:16:3e:df:96:17", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeaa6a4a-be", "ovs_interfaceid": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.498813] env[68437]: DEBUG oslo_vmware.api [None 
req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944122, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.641856] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.675869] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.972013] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.972013] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Instance network_info: |[{"id": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "address": "fa:16:3e:df:96:17", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeaa6a4a-be", "ovs_interfaceid": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 836.972366] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:96:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'eeaa6a4a-be89-4fbc-acaa-32fa246347f4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.979331] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.980193] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.980567] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f04d25cc-3b64-425a-a20d-7d5830b16b67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.017444] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.826098} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.018858] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 837.019074] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 837.019308] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.019308] env[68437]: value = "task-2944123" [ 837.019308] env[68437]: _type = "Task" [ 837.019308] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.019569] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0de97f80-d57d-4a49-8e01-b9a0c69ab1ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.035836] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944123, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.037296] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 837.037296] env[68437]: value = "task-2944124" [ 837.037296] env[68437]: _type = "Task" [ 837.037296] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.048368] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.057049] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.118886] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.119165] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.119331] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 837.119522] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.119664] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.119812] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.120033] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.120198] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.120371] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.120533] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.120707] env[68437]: DEBUG nova.virt.hardware [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.121856] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863fe045-2b3a-41f1-bfec-84c8c34d0b8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.135880] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d77eb7-a0b9-4187-9600-be9bcdfa1b74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.160051] env[68437]: DEBUG nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Received event network-vif-plugged-eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 837.160271] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Acquiring lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.160478] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.160648] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.161184] env[68437]: DEBUG nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] No waiting events found dispatching network-vif-plugged-eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.161184] env[68437]: WARNING nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Received unexpected event network-vif-plugged-eeaa6a4a-be89-4fbc-acaa-32fa246347f4 for instance with vm_state building and task_state spawning. [ 837.161184] env[68437]: DEBUG nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Received event network-vif-deleted-095e5fc1-9fd6-4b04-b1af-3637ee220d7c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 837.161409] env[68437]: DEBUG nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Received event network-changed-eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 837.161617] env[68437]: DEBUG nova.compute.manager [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Refreshing instance network info cache due to event network-changed-eeaa6a4a-be89-4fbc-acaa-32fa246347f4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 837.161617] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Acquiring lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.161777] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Acquired lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.161889] env[68437]: DEBUG nova.network.neutron [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Refreshing network info cache for port eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 837.177666] env[68437]: DEBUG nova.network.neutron [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [{"id": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "address": "fa:16:3e:f2:e2:7c", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2a02d6-04", "ovs_interfaceid": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.200163] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.214164] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b8bbf5-1bd7-4b69-8af2-8204b8aab64b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.221827] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d157ee-dc08-4d66-a6f6-788ecb5f11b4 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.254532] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf31159-a42a-461b-8f1a-1560ab8adc6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.262874] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a075084-8693-447e-af2e-c4e2909199f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.276556] env[68437]: DEBUG nova.compute.provider_tree [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.440799] env[68437]: DEBUG nova.compute.manager [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 837.441014] env[68437]: DEBUG nova.compute.manager [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing instance network info cache due to event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 837.442207] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.442375] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.442544] env[68437]: DEBUG nova.network.neutron [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 837.533227] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944123, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.547073] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075674} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.547637] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.549295] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa00749-005a-4183-bebf-7710850c66d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.575956] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.576979] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d380c5d-8782-47ae-9a0c-0835f74d5006 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.598247] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 837.598247] env[68437]: value = "task-2944125" [ 837.598247] env[68437]: _type = "Task" [ 837.598247] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.337619] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Successfully updated port: 1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.341122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.343699] env[68437]: DEBUG nova.scheduler.client.report [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.362920] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944125, 'name': ReconfigVM_Task, 'duration_secs': 0.501243} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.362920] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944123, 'name': CreateVM_Task, 'duration_secs': 0.849534} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.362920] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.362920] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.363174] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b85268b7-6b9f-4b18-9ccc-5d4185e3e424 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.364968] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.365169] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.365470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 838.365706] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae6defca-578a-4ab9-aef6-b2ab5a75afca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.373202] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 838.373202] env[68437]: value = "task-2944126" [ 838.373202] env[68437]: _type = "Task" [ 838.373202] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.373457] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 838.373457] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6abee-fee3-2556-32d8-38142b75095e" [ 838.373457] env[68437]: _type = "Task" [ 838.373457] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.384910] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944126, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.387791] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6abee-fee3-2556-32d8-38142b75095e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.692634] env[68437]: DEBUG nova.network.neutron [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Updated VIF entry in instance network info cache for port eeaa6a4a-be89-4fbc-acaa-32fa246347f4. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 838.693011] env[68437]: DEBUG nova.network.neutron [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Updating instance_info_cache with network_info: [{"id": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "address": "fa:16:3e:df:96:17", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeaa6a4a-be", "ovs_interfaceid": "eeaa6a4a-be89-4fbc-acaa-32fa246347f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.701047] env[68437]: DEBUG nova.network.neutron [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updated VIF entry in instance network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 838.701382] env[68437]: DEBUG nova.network.neutron [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.849974] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.850147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.850303] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 838.852948] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.855361] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "compute_resources" 
acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.887s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.855596] env[68437]: DEBUG nova.objects.instance [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lazy-loading 'resources' on Instance uuid ea330078-a8f2-41f4-a161-5d0e29ddfab5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.886180] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944126, 'name': Rename_Task, 'duration_secs': 0.435928} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.889701] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.889986] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6abee-fee3-2556-32d8-38142b75095e, 'name': SearchDatastore_Task, 'duration_secs': 0.014593} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.890190] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2d8deac-bab0-4448-8c67-4c04e299c242 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.891638] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.891864] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.892105] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.892399] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 
tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.892436] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.892646] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79cedf6c-9424-4683-bf94-0f52bf53616f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.900964] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 838.900964] env[68437]: value = "task-2944127" [ 838.900964] env[68437]: _type = "Task" [ 838.900964] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.910509] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.910689] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.914065] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a99132c3-3e89-4c92-b61d-2404998a3624 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.916354] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944127, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.917177] env[68437]: INFO nova.scheduler.client.report [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleted allocations for instance c74569b8-dfc9-4a74-9d25-74b484bd9477 [ 838.925711] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 838.925711] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5203d87e-a83a-48ce-95ce-6d54464ac3e8" [ 838.925711] env[68437]: _type = "Task" [ 838.925711] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.933937] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5203d87e-a83a-48ce-95ce-6d54464ac3e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.196045] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e9a8c72-6ed9-46b2-a567-8a686426317f req-18d92e92-171d-4dc5-8180-233cc96b1c7a service nova] Releasing lock "refresh_cache-f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.206858] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1cb10ee-c869-4aae-8c70-75d4bab25cea req-b4cd29fe-e363-47d5-b9fb-e23a643ed524 service nova] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.221244] env[68437]: DEBUG nova.compute.manager [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received event network-vif-plugged-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 839.221244] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.221244] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.221569] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.221569] env[68437]: DEBUG nova.compute.manager [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] No waiting events found dispatching network-vif-plugged-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 839.222520] env[68437]: WARNING nova.compute.manager [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received unexpected event network-vif-plugged-1fd37ce5-a7b4-43f3-8189-845c59896665 for instance with vm_state building and task_state spawning. 
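Annotation: the lock traffic in the records above comes from two oslo.concurrency entry points. The plain Acquiring/Acquired/Releasing lock lines (lockutils.py:313/316/334) are the lockutils.lock() context manager, while the lines that add ':: waited Ns' and ':: held Ns' (lockutils.py:405/410/424) are emitted by the wrapper that lockutils.synchronized() places around a decorated callable, such as InstanceEvents.pop_instance_event.<locals>._pop_event seen here. Below is a minimal sketch of both forms; the lock names and the event table are illustrative placeholders, not Nova's actual structures.

from oslo_concurrency import lockutils

# Hypothetical per-instance event table; names and layout are illustrative only.
_waiters = {}

@lockutils.synchronized('demo-instance-uuid-events')
def _pop_event(instance_uuid, event_name):
    # The decorator's wrapper logs 'acquired by "..." :: waited Ns' before
    # entering this body and 'released by "..." :: held Ns' after it returns.
    return _waiters.get(instance_uuid, {}).pop(event_name, None)

def refresh_network_cache(instance_uuid):
    # The context-manager form logs the plain Acquiring/Acquired/Releasing
    # lock lines around the guarded block.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # re-query Neutron and update the cached network_info here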
[ 839.222520] env[68437]: DEBUG nova.compute.manager [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 839.222520] env[68437]: DEBUG nova.compute.manager [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing instance network info cache due to event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 839.222520] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Acquiring lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.393010] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 839.417239] env[68437]: DEBUG oslo_vmware.api [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944127, 'name': PowerOnVM_Task, 'duration_secs': 0.513987} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.419801] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.420016] env[68437]: INFO nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Took 8.05 seconds to spawn the instance on the hypervisor. 
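Annotation: Rename_Task (task-2944126) and PowerOnVM_Task (task-2944127) above follow oslo.vmware's invoke-then-wait pattern: the driver starts a vCenter task through the session and then polls it, which is what produces the "Invoking VirtualMachine.PowerOnVM_Task", "Waiting for the task", "progress is N%" and "completed successfully" lines. The sketch below is a simplified illustration of that call sequence, assuming an already-established oslo_vmware.api.VMwareAPISession and an existing VM managed-object reference; it is not Nova's vm_util.power_on_instance itself.

def power_on(session, vm_ref):
    """Start PowerOnVM_Task on vCenter and block until it succeeds.

    `session` is an oslo_vmware.api.VMwareAPISession and `vm_ref` is the
    VirtualMachine managed-object reference; both are assumed to exist.
    """
    # invoke_api() issues the SOAP request (the "Invoking
    # VirtualMachine.PowerOnVM_Task with opID=..." line) and returns a
    # task reference.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls that task (the "Waiting for the task ... /
    # progress is N% / completed successfully" lines) and raises if
    # vCenter reports an error state.
    return session.wait_for_task(task_ref)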
[ 839.420206] env[68437]: DEBUG nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.421535] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988bbe4c-c30a-4680-8c12-39f20b793bcc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.434180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d4c5ad1-24d9-461a-85e9-e572280efaf3 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "c74569b8-dfc9-4a74-9d25-74b484bd9477" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.175s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.447844] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5203d87e-a83a-48ce-95ce-6d54464ac3e8, 'name': SearchDatastore_Task, 'duration_secs': 0.02054} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.448901] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17d49c5f-c163-4dfe-bfa6-293cfa60188a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.456951] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 839.456951] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d38c2c-ba9e-47e7-9901-9af9878215e7" [ 839.456951] env[68437]: _type = "Task" [ 839.456951] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.468045] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d38c2c-ba9e-47e7-9901-9af9878215e7, 'name': SearchDatastore_Task, 'duration_secs': 0.011355} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.470725] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.470948] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81/f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.471397] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44e511b9-91d6-492b-b630-77aa6c23dc5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.480785] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 839.480785] env[68437]: value = "task-2944128" [ 839.480785] env[68437]: _type = "Task" [ 839.480785] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.489141] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944128, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.616449] env[68437]: DEBUG nova.network.neutron [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [{"id": "1fd37ce5-a7b4-43f3-8189-845c59896665", "address": "fa:16:3e:b7:fa:48", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fd37ce5-a7", "ovs_interfaceid": "1fd37ce5-a7b4-43f3-8189-845c59896665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.911357] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f269e81-aa96-4c5d-a3df-a82031b629fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.919624] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d04e963-bdcf-4f05-abb1-7904c6b67728 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.923539] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.923822] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb16d3bd-e8c7-46de-a9ed-6b6cefd5c570 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.961811] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7e2a0f-3ebf-4bfe-8acb-92aef80f769a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.964726] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 839.964726] env[68437]: value = "task-2944129" [ 839.964726] env[68437]: _type = "Task" [ 839.964726] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.970702] env[68437]: INFO nova.compute.manager [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Took 43.28 seconds to build instance. [ 839.976329] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fd4360-481b-488b-8adc-ecdf7716d398 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.982547] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.995475] env[68437]: DEBUG nova.compute.provider_tree [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.000917] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944128, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.119177] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.119609] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance network_info: |[{"id": "1fd37ce5-a7b4-43f3-8189-845c59896665", "address": "fa:16:3e:b7:fa:48", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fd37ce5-a7", "ovs_interfaceid": "1fd37ce5-a7b4-43f3-8189-845c59896665", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.119968] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Acquired lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.120180] env[68437]: DEBUG nova.network.neutron [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 840.121574] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:fa:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fd37ce5-a7b4-43f3-8189-845c59896665', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.129873] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.130380] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.130613] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9cfa3cf-53ac-4d35-bd18-bdf396338d30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.151561] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.151561] env[68437]: value = "task-2944130" [ 840.151561] env[68437]: _type = "Task" [ 840.151561] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.160243] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944130, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.475931] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8cac8511-9d19-427d-824d-b70e29f07c5c tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.476214] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944129, 'name': PowerOffVM_Task, 'duration_secs': 0.235733} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.477443] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.478420] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79284ffe-a9c4-48b1-8f39-7be077b6d9c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.498814] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21684c83-4c41-406f-8206-e6a29f5556d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.502845] env[68437]: DEBUG nova.scheduler.client.report [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.509746] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546303} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.510671] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81/f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.510671] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.510882] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17ce9cf1-73d4-4324-bcaf-07d68abd66ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.521385] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 840.521385] env[68437]: value = "task-2944131" [ 840.521385] env[68437]: _type = "Task" [ 840.521385] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.531070] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944131, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.560281] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.560583] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a91953ba-0cbf-4b24-b700-69ba1375d310 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.566547] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 840.566547] env[68437]: value = "task-2944132" [ 840.566547] env[68437]: _type = "Task" [ 840.566547] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.574802] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944132, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.661200] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944130, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.871681] env[68437]: DEBUG nova.network.neutron [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updated VIF entry in instance network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 840.872077] env[68437]: DEBUG nova.network.neutron [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [{"id": "1fd37ce5-a7b4-43f3-8189-845c59896665", "address": "fa:16:3e:b7:fa:48", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fd37ce5-a7", "ovs_interfaceid": "1fd37ce5-a7b4-43f3-8189-845c59896665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.980557] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.012906] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.156s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.019666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.833s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.020859] env[68437]: INFO nova.compute.claims [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.034685] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944131, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184682} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.034685] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 841.034685] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99eec2f-ff9f-419f-a00e-5ab8a7e957aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.065367] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81/f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 841.065367] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-146ba121-0fab-4928-b33f-7418f102e9f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.080666] env[68437]: INFO nova.scheduler.client.report [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Deleted allocations for instance ea330078-a8f2-41f4-a161-5d0e29ddfab5 [ 841.093896] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 841.094142] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.094384] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.094531] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.094745] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.095076] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 841.095076] env[68437]: value = "task-2944133" [ 841.095076] env[68437]: _type = "Task" [ 841.095076] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.095266] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5b8d074-d7c2-4d44-bdb4-62110b03753e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.106381] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944133, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.108032] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.108032] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.108032] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-277a82eb-4dbf-4f5b-8799-13b056a1de45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.112857] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 841.112857] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52797bdd-93f0-d4d4-7464-0cbad4ca9b4b" [ 841.112857] env[68437]: _type = "Task" [ 841.112857] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.121019] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52797bdd-93f0-d4d4-7464-0cbad4ca9b4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.162047] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944130, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.375602] env[68437]: DEBUG oslo_concurrency.lockutils [req-8ce61512-1b19-4a36-b3c4-fdfcce23da42 req-20653185-fb1e-4f51-8bba-1a68d8670c5b service nova] Releasing lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.505764] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.591385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-94ca03cc-e51a-4bbd-9f08-e120380c2ebf tempest-VolumesAssistedSnapshotsTest-2123984171 tempest-VolumesAssistedSnapshotsTest-2123984171-project-member] Lock "ea330078-a8f2-41f4-a161-5d0e29ddfab5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.041s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.608766] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.623108] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52797bdd-93f0-d4d4-7464-0cbad4ca9b4b, 'name': SearchDatastore_Task, 'duration_secs': 0.013417} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.624099] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc2591ad-4298-42b6-9675-10f4dd0fcc16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.629350] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 841.629350] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526bf074-9240-876a-6e8d-390272722e6d" [ 841.629350] env[68437]: _type = "Task" [ 841.629350] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.637190] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526bf074-9240-876a-6e8d-390272722e6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.661932] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944130, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.668069] env[68437]: DEBUG nova.objects.instance [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lazy-loading 'flavor' on Instance uuid 07d98c5c-ede8-4001-93b2-1b1d83687ca1 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.110733] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944133, 'name': ReconfigVM_Task, 'duration_secs': 0.923484} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.110939] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Reconfigured VM instance instance-0000002f to attach disk [datastore1] f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81/f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 842.111627] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-181db7a2-2d06-4c27-bf0f-f10836010b4f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.120008] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 842.120008] env[68437]: value = "task-2944134" [ 842.120008] env[68437]: _type = "Task" [ 842.120008] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.132786] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944134, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.145451] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526bf074-9240-876a-6e8d-390272722e6d, 'name': SearchDatastore_Task, 'duration_secs': 0.032696} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.145737] env[68437]: DEBUG oslo_concurrency.lockutils [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.146021] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. {{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 842.146319] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c443c4b5-dad5-4be8-bdcd-43af30217636 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.154518] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 842.154518] env[68437]: value = "task-2944135" [ 842.154518] env[68437]: _type = "Task" [ 842.154518] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.170447] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.172527] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944130, 'name': CreateVM_Task, 'duration_secs': 1.732422} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.172730] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.173568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.173754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.174123] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.174629] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.174792] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.176516] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18f96c0b-cb9d-4835-aca0-029781a440a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.183432] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 842.183432] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528f7f5b-c244-906c-f7ca-b569b70aa5ba" [ 842.183432] env[68437]: _type = "Task" [ 842.183432] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.193868] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528f7f5b-c244-906c-f7ca-b569b70aa5ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.489203] env[68437]: DEBUG nova.compute.manager [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 842.613027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f6c4cf-70c0-4f05-afd7-0dad5aafb0c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.621683] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7553cb90-f3d2-4738-ae5a-da4d11cec280 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.677524] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79588791-a0a7-4f6c-99dd-622291131231 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.680904] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944134, 'name': Rename_Task, 'duration_secs': 0.158464} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.681524] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.682673] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32e3b7a0-9d0b-4bf7-b72d-2f8afa251b81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.697025] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944135, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.697404] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d636dc7-b137-4c6c-b48f-7a67aef2009b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.702227] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 842.702227] env[68437]: value = "task-2944136" [ 842.702227] env[68437]: _type = "Task" [ 842.702227] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.706982] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528f7f5b-c244-906c-f7ca-b569b70aa5ba, 'name': SearchDatastore_Task, 'duration_secs': 0.021031} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.710970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.711406] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.711780] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.712052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.712339] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.722031] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dcc8056-6e47-4ca3-bc1f-5ed6c6116652 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.724294] env[68437]: DEBUG nova.compute.provider_tree [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.732848] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] 
Task: {'id': task-2944136, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.735063] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.735420] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.736703] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b55a7d6b-f4ad-4bce-b62f-e0a1a0480b6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.744477] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 842.744477] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521f0b0f-75bf-1703-8b58-c957c856b354" [ 842.744477] env[68437]: _type = "Task" [ 842.744477] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.755706] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521f0b0f-75bf-1703-8b58-c957c856b354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.825191] env[68437]: DEBUG nova.network.neutron [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 842.912309] env[68437]: DEBUG nova.compute.manager [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 842.912499] env[68437]: DEBUG nova.compute.manager [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing instance network info cache due to event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 842.912762] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.006619] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.180187] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944135, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597551} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.180604] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. [ 843.181446] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b198877b-812c-48ad-bd10-1d78abc52d20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.207802] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.208176] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc682298-0fbe-4fea-8ae3-21ecd29d4855 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.228314] env[68437]: DEBUG nova.scheduler.client.report [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.235917] env[68437]: DEBUG oslo_vmware.api [None 
req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944136, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.236219] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 843.236219] env[68437]: value = "task-2944137" [ 843.236219] env[68437]: _type = "Task" [ 843.236219] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.246185] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944137, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.256794] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521f0b0f-75bf-1703-8b58-c957c856b354, 'name': SearchDatastore_Task, 'duration_secs': 0.01424} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.258031] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49473793-0aab-4219-9cdc-cf7e622ad94f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.264614] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 843.264614] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5272e8b5-3233-6ebd-9d1d-8806514b081f" [ 843.264614] env[68437]: _type = "Task" [ 843.264614] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.273583] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5272e8b5-3233-6ebd-9d1d-8806514b081f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.616404] env[68437]: DEBUG nova.network.neutron [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.718910] env[68437]: DEBUG oslo_vmware.api [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944136, 'name': PowerOnVM_Task, 'duration_secs': 0.785415} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.719071] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 843.719267] env[68437]: INFO nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Took 9.59 seconds to spawn the instance on the hypervisor. 
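The Rename_Task / PowerOnVM_Task / CopyVirtualDisk_Task records around this point all follow the same oslo.vmware pattern: the driver submits a vCenter task through the API session, then wait_for_task() re-polls it (the recurring "progress is N%" DEBUG lines from _poll_task) until it completes or raises. Below is a minimal illustrative sketch of that pattern, not Nova's actual driver code; the session constructor keyword names are assumptions from memory and the hostname/credentials are placeholders.

from oslo_vmware import api as vmware_api

def make_session(host, user, password):
    # api_retry_count / task_poll_interval control the retry and poll cadence
    # behind the periodic "progress is N%" log lines; names assumed.
    return vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=3,
        task_poll_interval=0.5,
    )

def power_on(session, vm_ref):
    # Submit PowerOnVM_Task via the SOAP proxy, then block until vCenter
    # reports the task finished; wait_for_task() raises if the task errors.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)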
[ 843.719443] env[68437]: DEBUG nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 843.720285] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351e05b5-78d5-4148-993d-42852f154234 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.740365] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.722s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.741681] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.744673] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.971s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.745102] env[68437]: DEBUG nova.objects.instance [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'resources' on Instance uuid 860107df-4e9b-44b1-9e85-b0ee3a827268 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.760933] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944137, 'name': ReconfigVM_Task, 'duration_secs': 0.324525} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.762325] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfigured VM instance instance-0000002c to attach disk [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.762961] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd8770b-5960-4927-af35-0465c823a803 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.777638] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5272e8b5-3233-6ebd-9d1d-8806514b081f, 'name': SearchDatastore_Task, 'duration_secs': 0.016977} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.795500] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.795803] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 26985e45-21ff-40bb-ac2b-c6f3700ccc97/26985e45-21ff-40bb-ac2b-c6f3700ccc97.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.801726] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-556cb32c-feff-4117-80d5-927a7d8ccf38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.803448] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a87e501-8a21-4a9c-a4e3-d833810f24f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.823256] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 843.823256] env[68437]: value = "task-2944138" [ 843.823256] env[68437]: _type = "Task" [ 843.823256] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.824641] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 843.824641] env[68437]: value = "task-2944139" [ 843.824641] env[68437]: _type = "Task" [ 843.824641] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.837841] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944138, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.840755] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944139, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.119662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.119960] env[68437]: DEBUG nova.compute.manager [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Inject network info {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 844.120264] env[68437]: DEBUG nova.compute.manager [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] network_info to inject: |[{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 844.125736] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfiguring VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 844.126113] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.126346] env[68437]: DEBUG nova.network.neutron [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 844.127656] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0308f7c-681f-4343-8bc4-dbd3203e0587 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.150111] env[68437]: DEBUG oslo_vmware.api [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 844.150111] env[68437]: value = "task-2944140" [ 844.150111] env[68437]: _type = "Task" [ 844.150111] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.160108] env[68437]: DEBUG oslo_vmware.api [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944140, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.238302] env[68437]: INFO nova.compute.manager [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Took 44.40 seconds to build instance. [ 844.253762] env[68437]: DEBUG nova.compute.utils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.258546] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.259290] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 844.342418] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944139, 'name': ReconfigVM_Task, 'duration_secs': 0.181842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.347665] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.348108] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944138, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.351313] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-688ace6e-cde9-4e92-97e4-7a9bb2d8b05d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.363738] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 844.363738] env[68437]: value = "task-2944141" [ 844.363738] env[68437]: _type = "Task" [ 844.363738] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.365803] env[68437]: DEBUG nova.policy [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59b6e538d77d441e852466b24b70e0a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e56fa6cd94413d82963b143143f519', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.387558] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.614397] env[68437]: DEBUG nova.objects.instance [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lazy-loading 'flavor' on Instance uuid 07d98c5c-ede8-4001-93b2-1b1d83687ca1 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.663689] env[68437]: DEBUG oslo_vmware.api [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944140, 'name': ReconfigVM_Task, 'duration_secs': 0.199389} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.663995] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1be0ee3e-cd22-4b82-9aa9-f5fd3db69c58 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfigured VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 844.694797] env[68437]: DEBUG nova.network.neutron [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updated VIF entry in instance network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 844.695206] env[68437]: DEBUG nova.network.neutron [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.740738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aa413f5-b385-4a77-a50d-5aa4831eeb54 tempest-ImagesOneServerNegativeTestJSON-1820869063 
tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.052s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.765171] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.842350] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944138, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73543} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.842572] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 26985e45-21ff-40bb-ac2b-c6f3700ccc97/26985e45-21ff-40bb-ac2b-c6f3700ccc97.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.842681] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.843037] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-767a4479-ff36-4e36-a716-1e003df989e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.868302] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 844.868302] env[68437]: value = "task-2944142" [ 844.868302] env[68437]: _type = "Task" [ 844.868302] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.882188] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.886313] env[68437]: DEBUG oslo_vmware.api [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944141, 'name': PowerOnVM_Task, 'duration_secs': 0.507768} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.886825] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.889856] env[68437]: DEBUG nova.compute.manager [None req-21054747-2c41-41bf-ae81-d55e2a29fd4e tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.890539] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb10f81-6268-4e59-b2a5-8f15f637fb16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.951584] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Successfully created port: b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.957661] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6979bc-c1d7-46c2-ad4c-7b6fd471108a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.968740] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d973421-f7f4-44ae-944e-9160e6c47164 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.011522] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8376efb-8773-4a4b-ba72-5c5c60686ea3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.019891] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5f58e3-e22a-4f46-80c6-a09e8634e53f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.034827] env[68437]: DEBUG nova.compute.provider_tree [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.122947] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.201855] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffeae470-dec5-46d2-8379-65e2bb2d5598 req-f33817d3-8a47-4ff3-ba6b-091e4b6ea93a service nova] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" 
{{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.202311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.245892] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 845.378775] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.292709} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.379201] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.380109] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7c1425-f5c7-47a1-9197-5a33e113af79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.410268] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 26985e45-21ff-40bb-ac2b-c6f3700ccc97/26985e45-21ff-40bb-ac2b-c6f3700ccc97.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.414486] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f523326-64dd-4567-b9ec-6bffeb237394 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.440059] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 845.440059] env[68437]: value = "task-2944143" [ 845.440059] env[68437]: _type = "Task" [ 845.440059] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.452282] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944143, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.538679] env[68437]: DEBUG nova.scheduler.client.report [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.672453] env[68437]: DEBUG nova.network.neutron [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 845.726904] env[68437]: DEBUG nova.compute.manager [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 845.726904] env[68437]: DEBUG nova.compute.manager [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing instance network info cache due to event network-changed-102fc7ce-ac0b-465b-8073-7ba895ea1293. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 845.727073] env[68437]: DEBUG oslo_concurrency.lockutils [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] Acquiring lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.776409] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.779867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.809485] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.809738] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.809922] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.810134] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.810289] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.810439] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.810652] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 845.810813] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.810980] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.811157] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.811332] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.812281] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b4116-322d-4cb4-b15c-b93b08baaf3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.821536] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73df43c3-eb47-49cb-a49d-06b73d55fcf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.951358] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944143, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.046661] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.302s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.049555] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.444s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.049994] env[68437]: DEBUG nova.objects.instance [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lazy-loading 'resources' on Instance uuid 995a3eae-c025-4efa-b509-0bf678bb0388 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.087728] env[68437]: INFO nova.scheduler.client.report [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocations for instance 860107df-4e9b-44b1-9e85-b0ee3a827268 [ 846.417882] env[68437]: DEBUG nova.network.neutron [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.451997] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944143, 'name': 
ReconfigVM_Task, 'duration_secs': 0.67294} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.452375] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 26985e45-21ff-40bb-ac2b-c6f3700ccc97/26985e45-21ff-40bb-ac2b-c6f3700ccc97.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.452999] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c238c222-e818-42ed-a3c4-6b9d280a2793 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.462301] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 846.462301] env[68437]: value = "task-2944144" [ 846.462301] env[68437]: _type = "Task" [ 846.462301] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.472819] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944144, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.557978] env[68437]: DEBUG nova.compute.manager [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.559062] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae3962f-b7aa-4809-ae78-0993bc60b191 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.595716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3705bd19-6309-48bc-a519-8a5b43dabb48 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "860107df-4e9b-44b1-9e85-b0ee3a827268" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.535s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.725284] env[68437]: INFO nova.compute.manager [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Unrescuing [ 846.725587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
846.725720] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquired lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.725898] env[68437]: DEBUG nova.network.neutron [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 846.820504] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Successfully updated port: b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.920861] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.921188] env[68437]: DEBUG nova.compute.manager [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Inject network info {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 846.921423] env[68437]: DEBUG nova.compute.manager [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] network_info to inject: |[{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 846.926900] env[68437]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfiguring VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 846.927495] env[68437]: DEBUG oslo_concurrency.lockutils [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] Acquired lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.927795] env[68437]: DEBUG nova.network.neutron [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Refreshing network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 846.929158] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-105e90cc-fa4d-4440-8b6a-d34782afa2bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.955777] env[68437]: DEBUG oslo_vmware.api [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 846.955777] env[68437]: value = "task-2944145" [ 846.955777] env[68437]: _type = "Task" [ 846.955777] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.982892] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944144, 'name': Rename_Task, 'duration_secs': 0.17588} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.990809] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.991590] env[68437]: DEBUG oslo_vmware.api [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944145, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.992115] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-955197fa-2a2a-40b2-a38e-22bebf975901 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.003804] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 847.003804] env[68437]: value = "task-2944146" [ 847.003804] env[68437]: _type = "Task" [ 847.003804] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.018378] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944146, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.072911] env[68437]: INFO nova.compute.manager [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] instance snapshotting [ 847.074806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20afd64-1502-46b5-8d4b-8452e325dac1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.100680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a480f6-2ce0-40f5-afb9-79a286fa46c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.170897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca765ba-84ae-42bd-8f76-1f06ab92da7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.181398] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9c99b6-da19-4770-9a78-bae4aa44f5c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.219551] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc5e85b-d142-4576-b246-73edb5ca6cd9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.232343] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c341f638-eb02-4a03-b9ac-f94a0c930267 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.249965] env[68437]: DEBUG nova.compute.provider_tree [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.326945] 
env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.326945] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.326945] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 847.372153] env[68437]: DEBUG nova.network.neutron [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updated VIF entry in instance network info cache for port 102fc7ce-ac0b-465b-8073-7ba895ea1293. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 847.372604] env[68437]: DEBUG nova.network.neutron [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [{"id": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "address": "fa:16:3e:65:55:29", "network": {"id": "cfabba59-a82c-49bc-adc9-5a13bd2a5c21", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-960382554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a8ec160fb3148c6aa238e70a975496f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11da2092-76f7-447e-babb-8fc14ad39a71", "external-id": "nsx-vlan-transportzone-585", "segmentation_id": 585, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102fc7ce-ac", "ovs_interfaceid": "102fc7ce-ac0b-465b-8073-7ba895ea1293", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.469080] env[68437]: DEBUG oslo_vmware.api [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944145, 'name': ReconfigVM_Task, 'duration_secs': 0.222444} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.469080] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8ba9cd18-0674-4b93-beda-477423df1965 tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Reconfigured VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 847.519923] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944146, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.611812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.612243] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.612560] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.612831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.613140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.616035] env[68437]: INFO nova.compute.manager [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Terminating instance [ 847.623379] 
env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 847.623379] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-619c4169-f668-4206-9510-45e4bf397f8f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.636478] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 847.636478] env[68437]: value = "task-2944147" [ 847.636478] env[68437]: _type = "Task" [ 847.636478] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.644929] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944147, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.754416] env[68437]: DEBUG nova.scheduler.client.report [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.767990] env[68437]: DEBUG nova.compute.manager [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Received event network-vif-plugged-b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 847.767990] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Acquiring lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.767990] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.767990] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 
req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.767990] env[68437]: DEBUG nova.compute.manager [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] No waiting events found dispatching network-vif-plugged-b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 847.768304] env[68437]: WARNING nova.compute.manager [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Received unexpected event network-vif-plugged-b47860d3-efcd-4110-8153-6bc15940189c for instance with vm_state building and task_state spawning. [ 847.768304] env[68437]: DEBUG nova.compute.manager [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Received event network-changed-b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 847.768304] env[68437]: DEBUG nova.compute.manager [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Refreshing instance network info cache due to event network-changed-b47860d3-efcd-4110-8153-6bc15940189c. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 847.768304] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Acquiring lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.808050] env[68437]: DEBUG nova.network.neutron [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [{"id": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "address": "fa:16:3e:f2:e2:7c", "network": {"id": "1c341ad6-0e1b-4211-be89-d39bb74a6f59", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-101911256-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "0ae9811689c645a7af2096a600ed6e1e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e2a02d6-04", "ovs_interfaceid": "9e2a02d6-0496-4807-b04b-bcedf775cfa4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.873720] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 847.875991] env[68437]: DEBUG oslo_concurrency.lockutils [req-7c5911ed-3e9a-45be-adcc-d37aaea8f5b3 req-6f552c07-aad3-448e-898c-fd45a0acc8e1 service nova] Releasing lock "refresh_cache-07d98c5c-ede8-4001-93b2-1b1d83687ca1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.018620] env[68437]: DEBUG oslo_vmware.api [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944146, 'name': PowerOnVM_Task, 'duration_secs': 0.6166} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.018879] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.019094] env[68437]: INFO nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 10.96 seconds to spawn the instance on the hypervisor. 
[ 848.019276] env[68437]: DEBUG nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.020126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7760b03d-5b42-4b6d-87cf-72db924e8bed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.103846] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Updating instance_info_cache with network_info: [{"id": "b47860d3-efcd-4110-8153-6bc15940189c", "address": "fa:16:3e:92:84:63", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb47860d3-ef", "ovs_interfaceid": "b47860d3-efcd-4110-8153-6bc15940189c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.120801] env[68437]: DEBUG nova.compute.manager [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 848.122820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 848.122820] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0d2bda-14af-40b2-969d-a20fad04b22d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.132161] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.132482] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a816f033-4959-4291-876f-e17e345b741c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.143548] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944147, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.144994] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 848.144994] env[68437]: value = "task-2944148" [ 848.144994] env[68437]: _type = "Task" [ 848.144994] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.155299] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.260026] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.262226] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.335s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.262460] env[68437]: DEBUG nova.objects.instance [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'resources' on Instance uuid 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.298987] env[68437]: INFO nova.scheduler.client.report [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Deleted allocations for instance 995a3eae-c025-4efa-b509-0bf678bb0388 [ 848.310390] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Releasing lock "refresh_cache-acbf4c5c-341c-4ebd-ad29-90ebf531aa86" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.311089] env[68437]: DEBUG nova.objects.instance [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lazy-loading 'flavor' on Instance uuid acbf4c5c-341c-4ebd-ad29-90ebf531aa86 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.543187] env[68437]: INFO nova.compute.manager [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 47.86 seconds to build instance. 
[ 848.609033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.609033] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Instance network_info: |[{"id": "b47860d3-efcd-4110-8153-6bc15940189c", "address": "fa:16:3e:92:84:63", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb47860d3-ef", "ovs_interfaceid": "b47860d3-efcd-4110-8153-6bc15940189c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 848.609249] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Acquired lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.609249] env[68437]: DEBUG nova.network.neutron [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Refreshing network info cache for port b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 848.609867] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:84:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b47860d3-efcd-4110-8153-6bc15940189c', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.622615] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.623626] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.623868] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2881aa12-8a0f-43c8-b556-0d0138f4eff9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.657815] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.657815] env[68437]: value = "task-2944149" [ 848.657815] env[68437]: _type = "Task" [ 848.657815] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.662832] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944147, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.673143] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944148, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.681989] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944149, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.810169] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f18d5eaf-b455-4e35-9f55-fd92d4aa61cf tempest-InstanceActionsNegativeTestJSON-1469538120 tempest-InstanceActionsNegativeTestJSON-1469538120-project-member] Lock "995a3eae-c025-4efa-b509-0bf678bb0388" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.653s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.821894] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b562a9bb-e8d0-464e-8eb7-aec5774a29b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.862060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.864364] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d463c67b-f3bd-43ef-93de-cac4a3a83c34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.876151] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 848.876151] env[68437]: value = "task-2944150" [ 848.876151] env[68437]: _type = "Task" [ 848.876151] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.886446] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944150, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.048631] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ab1fe24-f831-49f9-b3ed-aed7336baa84 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.671s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.155939] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944147, 'name': CreateSnapshot_Task, 'duration_secs': 1.183569} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.163371] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 849.164542] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74ca7cc-3815-4c7f-b15e-530c8c915083 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.178922] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944148, 'name': PowerOffVM_Task, 'duration_secs': 0.676548} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.184530] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.184730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.190021] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1e5b289-05fc-4056-a621-24ad01b30201 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.202383] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944149, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.292236] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 849.292236] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 849.292425] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Deleting the datastore file [datastore1] 07d98c5c-ede8-4001-93b2-1b1d83687ca1 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.292689] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88c4bc24-e178-451c-96f8-17b75ff00360 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.306236] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for the task: (returnval){ [ 849.306236] env[68437]: value = "task-2944152" [ 849.306236] env[68437]: _type = "Task" [ 849.306236] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.321220] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944152, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.391812] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944150, 'name': PowerOffVM_Task, 'duration_secs': 0.225078} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.395423] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.404995] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfiguring VM instance instance-0000002c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 849.404995] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-683c1b6b-d617-47a5-af89-4ff844c8f73e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.440116] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 849.440116] env[68437]: value = "task-2944153" [ 849.440116] env[68437]: _type = "Task" [ 849.440116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.456184] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.498254] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c869566-4463-4335-bfa8-4ba8dc056f7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.507795] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f77c27-3623-498e-9d5b-b368303a672d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.547278] env[68437]: DEBUG nova.network.neutron [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Updated VIF entry in instance network info cache for port b47860d3-efcd-4110-8153-6bc15940189c. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 849.547278] env[68437]: DEBUG nova.network.neutron [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Updating instance_info_cache with network_info: [{"id": "b47860d3-efcd-4110-8153-6bc15940189c", "address": "fa:16:3e:92:84:63", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb47860d3-ef", "ovs_interfaceid": "b47860d3-efcd-4110-8153-6bc15940189c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.547795] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba64fa5-99ef-4367-96bf-01442068a49e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.554411] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.565398] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c990ba4-a5af-44d3-9ee0-32d2ff98d01c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.585585] env[68437]: DEBUG nova.compute.provider_tree [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.676178] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944149, 'name': CreateVM_Task, 'duration_secs': 0.889689} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.676386] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.677181] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.677385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.677770] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 849.678074] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c7def1f-9287-478e-b048-d5fa54224b3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.684606] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 849.684606] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521d4c16-9139-a82a-ffb7-7a6bf954613b" [ 849.684606] env[68437]: _type = "Task" [ 849.684606] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.694909] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521d4c16-9139-a82a-ffb7-7a6bf954613b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.714078] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 849.714428] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-727b146d-7bca-4fdc-9808-b30c3ea9c907 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.727946] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 849.727946] env[68437]: value = "task-2944154" [ 849.727946] env[68437]: _type = "Task" [ 849.727946] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.737952] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.823020] env[68437]: DEBUG oslo_vmware.api [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Task: {'id': task-2944152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229348} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.823020] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 849.823020] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 849.823020] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 849.823020] env[68437]: INFO nova.compute.manager [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Took 1.70 seconds to destroy the instance on the hypervisor. 
[ 849.823545] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.823545] env[68437]: DEBUG nova.compute.manager [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 849.823545] env[68437]: DEBUG nova.network.neutron [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 849.957825] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944153, 'name': ReconfigVM_Task, 'duration_secs': 0.276236} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.958194] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Reconfigured VM instance instance-0000002c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 849.958418] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.958692] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-819a454a-d7da-4666-ba59-a7b9391f2ac9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.968832] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 849.968832] env[68437]: value = "task-2944155" [ 849.968832] env[68437]: _type = "Task" [ 849.968832] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.988036] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944155, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.056451] env[68437]: DEBUG oslo_concurrency.lockutils [req-0fb11c1d-cc7a-47dd-bbce-0e9a7461c8e4 req-cb3aa359-14a4-451a-b6aa-ed353963f854 service nova] Releasing lock "refresh_cache-ed1a81fd-dd4b-4126-96de-3c3f67cdca31" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.078107] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.091545] env[68437]: DEBUG nova.scheduler.client.report [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.199012] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521d4c16-9139-a82a-ffb7-7a6bf954613b, 'name': SearchDatastore_Task, 'duration_secs': 0.035421} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.200045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.200045] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.201174] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.201385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.201608] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.202251] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42aa7e30-f4b8-48ed-86b1-4ee356ae1ec9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.217043] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.217263] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.218118] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f09651-b4b3-4ab3-8260-673334a1a07c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.226809] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 850.226809] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bcfdca-f5c9-0765-9b3d-7fa73b6103b1" [ 850.226809] env[68437]: _type = "Task" [ 850.226809] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.241458] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bcfdca-f5c9-0765-9b3d-7fa73b6103b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.245556] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.482189] env[68437]: DEBUG oslo_vmware.api [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944155, 'name': PowerOnVM_Task, 'duration_secs': 0.401045} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.482531] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.482726] env[68437]: DEBUG nova.compute.manager [None req-4c62bc76-b025-49b3-bb28-4631a375926c tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.483633] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be6a509-c8ea-4b29-998b-421aecd6c67c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.600223] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.338s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.602560] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.108s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.604225] env[68437]: INFO nova.compute.claims [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 850.671537] env[68437]: INFO nova.scheduler.client.report [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted allocations for instance 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77 [ 850.743822] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bcfdca-f5c9-0765-9b3d-7fa73b6103b1, 'name': SearchDatastore_Task, 'duration_secs': 0.020664} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.745888] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-386992e7-853d-4fee-baea-bd61a9170129 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.752448] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.757170] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 850.757170] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52712eff-d383-ca7c-e318-dc876b559a3a" [ 850.757170] env[68437]: _type = "Task" [ 850.757170] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.767565] env[68437]: DEBUG nova.compute.manager [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 850.767854] env[68437]: DEBUG nova.compute.manager [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing instance network info cache due to event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 850.767930] env[68437]: DEBUG oslo_concurrency.lockutils [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.768084] env[68437]: DEBUG oslo_concurrency.lockutils [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.768279] env[68437]: DEBUG nova.network.neutron [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 850.777728] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52712eff-d383-ca7c-e318-dc876b559a3a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.912071] env[68437]: DEBUG nova.network.neutron [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.034996] env[68437]: DEBUG nova.compute.manager [req-418d1107-2dd6-4ecb-a565-0e568ec3c7cb req-4f15df00-476a-4662-a836-7466bf7e2d0b service nova] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Received event network-vif-deleted-102fc7ce-ac0b-465b-8073-7ba895ea1293 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 851.188615] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7498be71-343e-4e39-92db-e1d009968e44 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "6b10ff9f-3248-46fe-9cd4-19e0ebbcee77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.325s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.245975] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.270120] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52712eff-d383-ca7c-e318-dc876b559a3a, 'name': SearchDatastore_Task, 'duration_secs': 0.020612} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.270961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.271228] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ed1a81fd-dd4b-4126-96de-3c3f67cdca31/ed1a81fd-dd4b-4126-96de-3c3f67cdca31.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.271568] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b060590-62f8-47d1-a943-e8855cfe87ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.282636] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 851.282636] env[68437]: value = "task-2944156" [ 851.282636] env[68437]: _type = "Task" [ 851.282636] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.294522] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944156, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.416080] env[68437]: INFO nova.compute.manager [-] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Took 1.59 seconds to deallocate network for instance. [ 851.623422] env[68437]: DEBUG nova.network.neutron [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updated VIF entry in instance network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 851.624960] env[68437]: DEBUG nova.network.neutron [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.752364] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.796869] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944156, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.928251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.128573] env[68437]: DEBUG oslo_concurrency.lockutils [req-4106acd8-d5c1-4d89-8107-25b8182c7c15 req-5f10f1b2-b567-4f92-87cd-cfe13a414440 service nova] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.165068] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f20e9d4-dcd3-4053-b166-1091d0f3620f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.172824] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080477a5-55bf-4422-9663-0deed8c504c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.207616] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf48772-072f-4ecb-8508-b44e507f180b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.216488] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e73c51-e0f1-4ceb-8f97-f8460beb92ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.230788] env[68437]: DEBUG nova.compute.provider_tree [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.245575] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944154, 'name': CloneVM_Task, 'duration_secs': 2.258701} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.245869] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Created linked-clone VM from snapshot [ 852.246673] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243aaca1-8090-4a5e-aeca-2647614a8740 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.255151] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Uploading image 8fc60c9e-4848-456b-bd82-d6a590a39bd7 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 852.271933] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 852.272256] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4036955e-7c84-4114-8a74-351a2210ae2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.283734] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 852.283734] env[68437]: value = "task-2944157" [ 852.283734] env[68437]: _type = "Task" [ 852.283734] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.297343] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944157, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.300741] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944156, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.951586} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.300891] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ed1a81fd-dd4b-4126-96de-3c3f67cdca31/ed1a81fd-dd4b-4126-96de-3c3f67cdca31.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.301084] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.301491] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae9f3a61-bc0c-4daa-89cb-d44c57ac4c79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.308611] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 852.308611] env[68437]: value = "task-2944158" [ 852.308611] env[68437]: _type = "Task" [ 852.308611] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.318935] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944158, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.738621] env[68437]: DEBUG nova.scheduler.client.report [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.794483] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944157, 'name': Destroy_Task, 'duration_secs': 0.373274} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.795167] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Destroyed the VM [ 852.795445] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 852.795726] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bbb8e7a6-0eda-464c-8f18-5c4e47ddd129 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.803805] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 852.803805] env[68437]: value = "task-2944159" [ 852.803805] env[68437]: _type = "Task" [ 852.803805] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.815734] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944159, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.822097] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167829} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.822389] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.823182] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5515dce1-fedc-4573-9eb9-1a1e7f44cc08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.847546] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] ed1a81fd-dd4b-4126-96de-3c3f67cdca31/ed1a81fd-dd4b-4126-96de-3c3f67cdca31.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.847925] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d59ee291-9b43-49dc-9f57-df20e0751675 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.870945] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 852.870945] env[68437]: value = "task-2944160" [ 852.870945] env[68437]: _type = "Task" [ 852.870945] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.883690] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944160, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.150763] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.151083] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.151291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.151687] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.151687] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.153874] env[68437]: INFO nova.compute.manager [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Terminating instance [ 853.246444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.246922] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 853.249894] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.087s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.250125] env[68437]: DEBUG nova.objects.instance [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lazy-loading 'resources' on Instance uuid c5af19d6-5534-45e6-8c9c-dacf30d4fb1a {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.314011] env[68437]: DEBUG nova.compute.manager [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 853.314316] env[68437]: DEBUG nova.compute.manager [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing instance network info cache due to event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 853.314583] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.314830] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.314991] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 853.319744] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944159, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.382449] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944160, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.657585] env[68437]: DEBUG nova.compute.manager [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 853.657905] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.658789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d3dea5-6dc1-471d-a2ec-70fa602205c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.667287] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.669578] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a187cb9-604b-4889-acf0-2277aa7281f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.678261] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 853.678261] env[68437]: value = "task-2944161" [ 853.678261] env[68437]: _type = "Task" [ 853.678261] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.687855] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.757597] env[68437]: DEBUG nova.compute.utils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 853.760333] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 853.760333] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 853.807710] env[68437]: DEBUG nova.policy [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59b6e538d77d441e852466b24b70e0a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0e56fa6cd94413d82963b143143f519', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 853.828341] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944159, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.884933] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944160, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.165677] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updated VIF entry in instance network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 854.166285] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.192621] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944161, 'name': PowerOffVM_Task, 'duration_secs': 0.299805} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.192913] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.193341] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.193650] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93fc7d87-a5ec-446d-ba87-96cbf8207288 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.263280] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 854.271252] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.271688] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.271947] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleting the datastore file [datastore1] acbf4c5c-341c-4ebd-ad29-90ebf531aa86 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.272441] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1d8a381-128a-4810-9858-4e867755fcfe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.279885] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 854.279885] env[68437]: value = "task-2944163" [ 854.279885] env[68437]: _type = "Task" [ 854.279885] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.290475] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.323974] env[68437]: DEBUG oslo_vmware.api [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944159, 'name': RemoveSnapshot_Task, 'duration_secs': 1.138635} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.324806] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Successfully created port: dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.329046] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 854.374353] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a361bf7-24a7-4659-b836-e878ade521b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.392575] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944160, 'name': ReconfigVM_Task, 'duration_secs': 1.15276} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.392979] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Reconfigured VM instance instance-00000031 to attach disk [datastore2] ed1a81fd-dd4b-4126-96de-3c3f67cdca31/ed1a81fd-dd4b-4126-96de-3c3f67cdca31.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.394439] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a872fb-c755-4f46-b1e1-f4014a7b4e32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.398983] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ff1af7b-6e48-49a6-8a18-ca1dd84070c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.437900] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81460b10-84e8-4150-b32c-eaefbb14da20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.441166] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 854.441166] env[68437]: value = "task-2944164" [ 854.441166] env[68437]: _type = "Task" [ 854.441166] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.449653] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4408b3-230e-4a45-b39d-b43d4df1ce5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.456788] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944164, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.466696] env[68437]: DEBUG nova.compute.provider_tree [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.671600] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.671901] env[68437]: DEBUG nova.compute.manager [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 854.672437] env[68437]: DEBUG nova.compute.manager [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing instance network info cache due to event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 854.672437] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Acquiring lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.672437] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Acquired lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.672661] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 854.791967] env[68437]: DEBUG oslo_vmware.api [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.407634} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.792715] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.793012] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.793348] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.794802] env[68437]: INFO nova.compute.manager [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Took 1.14 seconds to destroy the instance on the hypervisor. [ 854.794802] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.794802] env[68437]: DEBUG nova.compute.manager [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 854.794802] env[68437]: DEBUG nova.network.neutron [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 854.835156] env[68437]: WARNING nova.compute.manager [None req-6c3b5e3e-68e1-4aa1-a96f-00ffa9ecd5e5 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Image not found during snapshot: nova.exception.ImageNotFound: Image 8fc60c9e-4848-456b-bd82-d6a590a39bd7 could not be found. [ 854.955797] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944164, 'name': Rename_Task, 'duration_secs': 0.338069} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.956192] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.957297] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2790e65-85e1-43f4-842e-a819000089b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.965756] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 854.965756] env[68437]: value = "task-2944165" [ 854.965756] env[68437]: _type = "Task" [ 854.965756] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.969522] env[68437]: DEBUG nova.scheduler.client.report [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.978984] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944165, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.247949] env[68437]: DEBUG nova.compute.manager [req-4b11610f-e564-45cc-b14b-6bb9d7ffab1b req-633eb8d9-66d5-43e6-ae83-6cae239638c1 service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Received event network-vif-deleted-9e2a02d6-0496-4807-b04b-bcedf775cfa4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 855.248237] env[68437]: INFO nova.compute.manager [req-4b11610f-e564-45cc-b14b-6bb9d7ffab1b req-633eb8d9-66d5-43e6-ae83-6cae239638c1 service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Neutron deleted interface 9e2a02d6-0496-4807-b04b-bcedf775cfa4; detaching it from the instance and deleting it from the info cache [ 855.248367] env[68437]: DEBUG nova.network.neutron [req-4b11610f-e564-45cc-b14b-6bb9d7ffab1b req-633eb8d9-66d5-43e6-ae83-6cae239638c1 service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.275585] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 855.309128] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 855.309128] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.309128] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 855.309536] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.309867] env[68437]: DEBUG 
nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 855.310202] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 855.310723] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 855.312036] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 855.312036] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 855.312036] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 855.312036] env[68437]: DEBUG nova.virt.hardware [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 855.313316] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9e897a-4719-47d4-8a2d-d344d2314b3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.325500] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208b7b45-35cc-4117-9b13-d9e8d77b99f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.479402] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.479402] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 
tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944165, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.479638] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.183s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.480385] env[68437]: DEBUG nova.objects.instance [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lazy-loading 'resources' on Instance uuid b92efa60-ef18-4578-b00d-6a2438e7eacf {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.502774] env[68437]: INFO nova.scheduler.client.report [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Deleted allocations for instance c5af19d6-5534-45e6-8c9c-dacf30d4fb1a [ 855.527452] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updated VIF entry in instance network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 855.527452] env[68437]: DEBUG nova.network.neutron [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [{"id": "1fd37ce5-a7b4-43f3-8189-845c59896665", "address": "fa:16:3e:b7:fa:48", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fd37ce5-a7", "ovs_interfaceid": "1fd37ce5-a7b4-43f3-8189-845c59896665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.533664] env[68437]: DEBUG nova.network.neutron [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.716316] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.716316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.716316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.716316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.716597] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.717327] env[68437]: INFO nova.compute.manager [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Terminating instance [ 855.756021] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b6a8d32-ceb6-4dba-8ca4-f07d5d48c35a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.766021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c1bea1-c009-4224-a259-be4d383d94a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.808149] env[68437]: DEBUG nova.compute.manager [req-4b11610f-e564-45cc-b14b-6bb9d7ffab1b req-633eb8d9-66d5-43e6-ae83-6cae239638c1 service nova] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Detach interface failed, port_id=9e2a02d6-0496-4807-b04b-bcedf775cfa4, reason: Instance acbf4c5c-341c-4ebd-ad29-90ebf531aa86 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 855.870756] env[68437]: DEBUG nova.compute.manager [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 855.870756] env[68437]: DEBUG nova.compute.manager [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing instance network info cache due to event network-changed-1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 855.870756] env[68437]: DEBUG oslo_concurrency.lockutils [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] Acquiring lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.976956] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944165, 'name': PowerOnVM_Task, 'duration_secs': 0.726645} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.978066] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.978066] env[68437]: INFO nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Took 10.20 seconds to spawn the instance on the hypervisor. 
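Note: the PowerOnVM_Task entries above follow the oslo.vmware task pattern that recurs throughout this log: a vSphere task is started, then wait_for_task polls it (the _poll_task progress lines) until it completes and reports a duration_secs. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession ('session') and a VirtualMachine managed-object reference ('vm_ref'), neither of which is defined in this log:

    def power_on_and_wait(session, vm_ref):
        # Sketch only: 'session' and 'vm_ref' are assumed inputs, not taken from this log.
        # Invoke the vSphere PowerOnVM_Task method through the session's vim proxy.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task blocks, polling task progress (the _poll_task lines above)
        # until the task succeeds, then returns the completed task info.
        return session.wait_for_task(task_ref)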
[ 855.978066] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.980235] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75418322-34f3-46f2-bf39-6e877fc3fe6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.013071] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd9c879d-8855-420e-bb49-de79eec9d7cf tempest-ServerExternalEventsTest-385152037 tempest-ServerExternalEventsTest-385152037-project-member] Lock "c5af19d6-5534-45e6-8c9c-dacf30d4fb1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.564s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.033838] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9898291-228c-485b-9c14-f23825dd6bfd req-d23a068d-7624-441c-9885-a39ec8dac944 service nova] Releasing lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.035818] env[68437]: DEBUG oslo_concurrency.lockutils [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] Acquired lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.036703] env[68437]: DEBUG nova.network.neutron [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Refreshing network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 856.038370] env[68437]: INFO nova.compute.manager [-] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Took 1.24 seconds to deallocate network for instance. [ 856.224235] env[68437]: DEBUG nova.compute.manager [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 856.224235] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 856.225180] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fd0cc1-f123-4cb8-bc00-a9fb7f772a87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.234372] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 856.234940] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba8435b3-03ae-4f3a-8bf8-1ed9e4c5a956 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.249601] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 856.249601] env[68437]: value = "task-2944166" [ 856.249601] env[68437]: _type = "Task" [ 856.249601] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.252767] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Successfully updated port: dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.261434] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944166, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.485990] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bbae4e-7772-4496-b8a8-2ce794e7271b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.496384] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd71979-800a-4221-a883-41ed2890ca05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.543323] env[68437]: INFO nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Took 42.38 seconds to build instance. 
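Note: the lockutils entries that follow show oslo.concurrency's named in-process locks: each "acquired ... :: waited Ns" / "released ... :: held Ns" pair brackets a critical section guarded by a lock name such as "compute_resources" or an instance UUID. A minimal sketch of that usage, with hypothetical stand-ins for the guarded work:

    from oslo_concurrency import lockutils

    def update_usage(tracker, instance):
        # 'tracker' and 'instance' are illustrative placeholders, not Nova's objects.
        with lockutils.lock('compute_resources'):
            # While this block runs, other threads requesting the same named lock
            # wait; that wait is what the log records as 'waited 28.183s' nearby.
            tracker.update(instance)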
[ 856.545103] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf56b56-d241-4399-be2c-29eace98edf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.549999] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.550442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.550710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.551652] env[68437]: DEBUG nova.objects.instance [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'flavor' on Instance uuid 27c18765-38cf-41d6-9139-9acffa94fbe6 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.559070] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8b5f62-885a-443e-9625-966ebf9452d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.576060] env[68437]: DEBUG nova.compute.provider_tree [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.765411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.765411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.765411] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 
tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 856.765411] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944166, 'name': PowerOffVM_Task, 'duration_secs': 0.311659} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.765411] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.765737] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.767410] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d36c4713-f273-4780-871a-6495d6939a82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.846489] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.846836] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.846930] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleting the datastore file [datastore1] 26985e45-21ff-40bb-ac2b-c6f3700ccc97 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.848331] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acefb65f-706e-4403-9d20-478da90e1bb1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.859583] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 856.859583] env[68437]: value = "task-2944168" [ 856.859583] env[68437]: _type = "Task" [ 856.859583] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.869900] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.873595] env[68437]: DEBUG nova.network.neutron [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updated VIF entry in instance network info cache for port 1fd37ce5-a7b4-43f3-8189-845c59896665. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 856.874684] env[68437]: DEBUG nova.network.neutron [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [{"id": "1fd37ce5-a7b4-43f3-8189-845c59896665", "address": "fa:16:3e:b7:fa:48", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fd37ce5-a7", "ovs_interfaceid": "1fd37ce5-a7b4-43f3-8189-845c59896665", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.050228] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.187s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.080633] env[68437]: DEBUG nova.scheduler.client.report [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.126611] env[68437]: DEBUG nova.objects.instance [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'pci_requests' on Instance uuid 27c18765-38cf-41d6-9139-9acffa94fbe6 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.305244] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 857.354843] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.355141] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.355356] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.355547] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.355917] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.358433] env[68437]: INFO nova.compute.manager [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Terminating instance [ 857.379267] env[68437]: DEBUG 
oslo_concurrency.lockutils [req-4f4beec5-55ac-454c-b11c-cd029e901c5d req-20c01b64-fb35-49a2-b8ff-397f16838ed8 service nova] Releasing lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.381561] env[68437]: DEBUG oslo_vmware.api [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200748} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.382598] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.382864] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 857.383300] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 857.383578] env[68437]: INFO nova.compute.manager [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 1.16 seconds to destroy the instance on the hypervisor. [ 857.384684] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 857.385602] env[68437]: DEBUG nova.compute.manager [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 857.385758] env[68437]: DEBUG nova.network.neutron [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 857.554169] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.559184] env[68437]: DEBUG nova.network.neutron [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Updating instance_info_cache with network_info: [{"id": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "address": "fa:16:3e:6a:b4:79", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcc5d60a-da", "ovs_interfaceid": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.588886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.109s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.595025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.234s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.597358] env[68437]: INFO nova.compute.claims [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.616581] env[68437]: INFO nova.scheduler.client.report [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Deleted allocations for instance b92efa60-ef18-4578-b00d-6a2438e7eacf [ 857.630446] env[68437]: DEBUG nova.objects.base [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Object Instance<27c18765-38cf-41d6-9139-9acffa94fbe6> lazy-loaded attributes: flavor,pci_requests {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 857.630669] env[68437]: DEBUG nova.network.neutron [None 
req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 857.701937] env[68437]: DEBUG nova.policy [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 857.874871] env[68437]: DEBUG nova.compute.manager [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 857.877581] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.877581] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1803d6ba-ce8f-4c3b-a735-73c18c015edf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.889723] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.892642] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-353aa7ea-2082-419f-bee6-2be9d43a4445 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.898804] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 857.898804] env[68437]: value = "task-2944169" [ 857.898804] env[68437]: _type = "Task" [ 857.898804] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.910653] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944169, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.062598] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.062889] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Instance network_info: |[{"id": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "address": "fa:16:3e:6a:b4:79", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcc5d60a-da", "ovs_interfaceid": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 858.063327] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:b4:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '028bae2d-fe6c-4207-b4a3-3fab45fbf1d6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcc5d60a-dad1-435b-8b40-7f436bfd157b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 858.071136] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 858.073529] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 858.073953] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb481e82-5ccb-403b-8025-be78e2af0678 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.101286] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.112301] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 858.112301] env[68437]: value = "task-2944170" [ 858.112301] env[68437]: _type = "Task" [ 858.112301] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.121391] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944170, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.125201] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5be4fb23-59e5-4d11-8e26-395bfd7fd05f tempest-ServersV294TestFqdnHostnames-2078102825 tempest-ServersV294TestFqdnHostnames-2078102825-project-member] Lock "b92efa60-ef18-4578-b00d-6a2438e7eacf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.793s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.129868] env[68437]: DEBUG nova.compute.manager [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Received event network-vif-plugged-dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 858.129950] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Acquiring lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.130143] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.130328] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.130795] env[68437]: DEBUG nova.compute.manager [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] No waiting events found dispatching network-vif-plugged-dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 858.130795] env[68437]: WARNING nova.compute.manager [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Received unexpected event network-vif-plugged-dcc5d60a-dad1-435b-8b40-7f436bfd157b for instance with vm_state building and task_state spawning. [ 858.130911] env[68437]: DEBUG nova.compute.manager [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Received event network-changed-dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 858.131137] env[68437]: DEBUG nova.compute.manager [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Refreshing instance network info cache due to event network-changed-dcc5d60a-dad1-435b-8b40-7f436bfd157b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 858.131281] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Acquiring lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.131427] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Acquired lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.131591] env[68437]: DEBUG nova.network.neutron [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Refreshing network info cache for port dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 858.146649] env[68437]: DEBUG nova.network.neutron [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Successfully created port: 7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 858.317483] env[68437]: DEBUG nova.network.neutron [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.411799] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944169, 'name': PowerOffVM_Task, 'duration_secs': 0.19045} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.412970] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 858.413346] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 858.413594] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d131112-0195-4dd6-8de0-cff7a1a84dde {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.501580] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 858.501802] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 858.502432] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleting the datastore file [datastore1] f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.502432] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78e5f80d-76ff-405f-b77e-72df41224643 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.509330] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 858.509330] env[68437]: value = "task-2944172" [ 858.509330] env[68437]: _type = "Task" [ 858.509330] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.517799] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.622584] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944170, 'name': CreateVM_Task, 'duration_secs': 0.395743} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.622815] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.624621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.624621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.624621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 858.624621] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96b3f7bd-62ed-49a5-9792-7924cc153e43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.630888] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 858.630888] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52108409-29dc-5a31-fcc4-805662f27752" [ 858.630888] env[68437]: _type = "Task" [ 858.630888] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.643058] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52108409-29dc-5a31-fcc4-805662f27752, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.819319] env[68437]: INFO nova.compute.manager [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 1.43 seconds to deallocate network for instance. [ 858.848563] env[68437]: DEBUG nova.network.neutron [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Updated VIF entry in instance network info cache for port dcc5d60a-dad1-435b-8b40-7f436bfd157b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 858.848925] env[68437]: DEBUG nova.network.neutron [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Updating instance_info_cache with network_info: [{"id": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "address": "fa:16:3e:6a:b4:79", "network": {"id": "404699e8-c917-4330-bbef-e17978baa3ec", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1166671453-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0e56fa6cd94413d82963b143143f519", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "028bae2d-fe6c-4207-b4a3-3fab45fbf1d6", "external-id": "nsx-vlan-transportzone-955", "segmentation_id": 955, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcc5d60a-da", "ovs_interfaceid": "dcc5d60a-dad1-435b-8b40-7f436bfd157b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.023892] env[68437]: DEBUG oslo_vmware.api [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254217} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.026720] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.026720] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.026803] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.027063] env[68437]: INFO nova.compute.manager [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 859.027406] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.030368] env[68437]: DEBUG nova.compute.manager [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.030491] env[68437]: DEBUG nova.network.neutron [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 859.144060] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52108409-29dc-5a31-fcc4-805662f27752, 'name': SearchDatastore_Task, 'duration_secs': 0.011348} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.144559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.144832] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.145082] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.145236] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.145417] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 859.145699] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c6f52a76-220b-4fab-924b-d96fad998f06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.163105] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 859.163530] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 859.164845] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be209d87-f85f-45cc-8f07-6bbd6c4c5d31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.173881] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 859.173881] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5a8c3-2306-4ef3-8169-69a5e6b2195a" [ 859.173881] env[68437]: _type = "Task" [ 859.173881] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.179633] env[68437]: DEBUG nova.compute.manager [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 859.179804] env[68437]: DEBUG nova.compute.manager [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing instance network info cache due to event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 859.180084] env[68437]: DEBUG oslo_concurrency.lockutils [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.180256] env[68437]: DEBUG oslo_concurrency.lockutils [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 859.180432] env[68437]: DEBUG nova.network.neutron [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 859.183631] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac399e5e-2dce-4769-bc0a-6531f41fd103 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.191499] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5a8c3-2306-4ef3-8169-69a5e6b2195a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.198508] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07644909-8c5f-49e1-8df2-628b470346be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.233508] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a0ef5c-f486-4391-8fe9-3b261fefecc3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.241758] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffaadb6-e753-49db-8fae-faffe32e2f7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.256696] env[68437]: DEBUG nova.compute.provider_tree [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.328731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.355234] env[68437]: DEBUG oslo_concurrency.lockutils [req-cd441fc0-dc0f-43ac-a289-87f846b283c9 
req-f125402e-39fd-4129-8fb6-84e1705df694 service nova] Releasing lock "refresh_cache-fc62ff9d-1bd8-4b32-9e71-41410276802d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.685563] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5a8c3-2306-4ef3-8169-69a5e6b2195a, 'name': SearchDatastore_Task, 'duration_secs': 0.040151} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.685820] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ff79fa-8b71-4fef-a2ad-a63afc1a7479 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.694527] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 859.694527] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52326b9a-df29-24f6-9ed7-a7e1a50fc978" [ 859.694527] env[68437]: _type = "Task" [ 859.694527] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.706264] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52326b9a-df29-24f6-9ed7-a7e1a50fc978, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.761988] env[68437]: DEBUG nova.scheduler.client.report [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.776663] env[68437]: DEBUG nova.network.neutron [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Successfully updated port: 7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 859.806201] env[68437]: DEBUG nova.network.neutron [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.923800] env[68437]: DEBUG nova.network.neutron [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updated VIF entry in instance network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 859.924672] env[68437]: DEBUG nova.network.neutron [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.206957] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52326b9a-df29-24f6-9ed7-a7e1a50fc978, 'name': SearchDatastore_Task, 'duration_secs': 0.036643} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.208113] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.208431] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] fc62ff9d-1bd8-4b32-9e71-41410276802d/fc62ff9d-1bd8-4b32-9e71-41410276802d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 860.208637] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f0632ad-6f4a-4771-81d3-c36cfe8f6a1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.216576] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 860.216576] env[68437]: value = "task-2944173" [ 860.216576] env[68437]: _type = "Task" [ 860.216576] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.224495] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.267751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.268387] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 860.271156] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.695s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.272552] env[68437]: INFO nova.compute.claims [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.281420] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.281642] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.281861] env[68437]: DEBUG nova.network.neutron [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 860.308889] env[68437]: INFO nova.compute.manager [-] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Took 1.28 seconds to deallocate network for instance. 
[ 860.428736] env[68437]: DEBUG oslo_concurrency.lockutils [req-aaf78fd7-4715-472e-abde-f3e6aa503619 req-701a4630-e05c-401a-ba0c-ddf2ca5afb27 service nova] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 860.466715] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Received event network-vif-deleted-1fd37ce5-a7b4-43f3-8189-845c59896665 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 860.466715] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Received event network-vif-deleted-eeaa6a4a-be89-4fbc-acaa-32fa246347f4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 860.466715] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-vif-plugged-7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 860.467119] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.467278] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.467387] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.467587] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] No waiting events found dispatching network-vif-plugged-7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 860.467706] env[68437]: WARNING nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received unexpected event network-vif-plugged-7bfe6ebf-7a19-4548-afbe-c423b4ceb680 for instance with vm_state active and task_state None. 
[ 860.467910] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-changed-7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 860.468044] env[68437]: DEBUG nova.compute.manager [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing instance network info cache due to event network-changed-7bfe6ebf-7a19-4548-afbe-c423b4ceb680. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 860.468243] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.561506] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.730019] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437638} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.730019] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] fc62ff9d-1bd8-4b32-9e71-41410276802d/fc62ff9d-1bd8-4b32-9e71-41410276802d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.730019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.730019] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8a51609-1923-4d5a-929f-e20f9ab386ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.737502] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 860.737502] env[68437]: value = "task-2944174" [ 860.737502] env[68437]: _type = "Task" [ 860.737502] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.756353] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944174, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.782212] env[68437]: DEBUG nova.compute.utils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.786616] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 860.786616] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 860.818025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.840070] env[68437]: WARNING nova.network.neutron [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] 6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88 already exists in list: networks containing: ['6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88']. ignoring it [ 860.903847] env[68437]: DEBUG nova.policy [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8f30aea8b2b4ea1a6eb7d30875a4c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '188b948736f44dfa8dd9aeb258180c58', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 861.247498] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944174, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068374} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.247770] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.248672] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d47ae73-4031-4c75-9b93-af3975532a8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.274725] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] fc62ff9d-1bd8-4b32-9e71-41410276802d/fc62ff9d-1bd8-4b32-9e71-41410276802d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.277043] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01081d9c-700e-4e20-a25b-064260f6b339 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.293556] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 861.310204] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 861.310204] env[68437]: value = "task-2944175" [ 861.310204] env[68437]: _type = "Task" [ 861.310204] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.320475] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944175, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.587749] env[68437]: DEBUG nova.network.neutron [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7bfe6ebf-7a19-4548-afbe-c423b4ceb680", "address": "fa:16:3e:a1:9a:22", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bfe6ebf-7a", "ovs_interfaceid": "7bfe6ebf-7a19-4548-afbe-c423b4ceb680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.591824] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Successfully created port: 7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.713417] env[68437]: DEBUG nova.compute.manager [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event 
network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 861.713614] env[68437]: DEBUG nova.compute.manager [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing instance network info cache due to event network-changed-c09d45df-fef7-4b7f-ac2b-cea270301ba4. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 861.713828] env[68437]: DEBUG oslo_concurrency.lockutils [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] Acquiring lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.713965] env[68437]: DEBUG oslo_concurrency.lockutils [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] Acquired lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.714204] env[68437]: DEBUG nova.network.neutron [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Refreshing network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 861.735793] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.736040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.810135] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588a0e04-290c-4383-8d9e-645f4162d212 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.824169] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dd86eb-2629-440b-8a8b-fd35b35b19da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.827494] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944175, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.857480] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b39a19-004f-4e5c-b51b-a3d3df77b001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.868019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699b0dd4-045f-414f-9ff2-685f11e52105 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.888106] env[68437]: DEBUG nova.compute.provider_tree [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.092271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.092948] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.093118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.093414] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.093649] env[68437]: DEBUG nova.network.neutron [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Refreshing network info cache for port 7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 862.098028] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765fad03-d41c-4d94-aeb7-83528ef02fb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.116032] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 862.116916] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.117232] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 862.117534] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.117793] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 862.118079] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 862.118445] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 862.118714] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 862.118998] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 862.119402] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] 
{{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 862.119739] env[68437]: DEBUG nova.virt.hardware [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.126278] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfiguring VM to attach interface {{(pid=68437) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 862.127602] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a5b0606-00b0-4a71-af84-25d49d07deb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.151153] env[68437]: DEBUG oslo_vmware.api [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 862.151153] env[68437]: value = "task-2944176" [ 862.151153] env[68437]: _type = "Task" [ 862.151153] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.157987] env[68437]: DEBUG oslo_vmware.api [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944176, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.312183] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 862.326955] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944175, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.335733] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 862.335999] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.336177] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 862.336380] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.337306] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 862.337542] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 862.337790] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 862.337962] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 862.338183] env[68437]: DEBUG nova.virt.hardware [None 
req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 862.338381] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 862.338573] env[68437]: DEBUG nova.virt.hardware [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 862.339497] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99122e0-eb03-4fac-95e0-606a76792f18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.350583] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbfa838-ff54-40fe-bd0d-52379fec873b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.390466] env[68437]: DEBUG nova.scheduler.client.report [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 862.452512] env[68437]: DEBUG nova.network.neutron [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updated VIF entry in instance network info cache for port c09d45df-fef7-4b7f-ac2b-cea270301ba4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 862.452922] env[68437]: DEBUG nova.network.neutron [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [{"id": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "address": "fa:16:3e:ec:45:4e", "network": {"id": "68df424b-467c-4562-95aa-2c91302fe5da", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1005576802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ab0cdca3a7f4d0f8f41a2fe4f6e3d3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc09d45df-fe", "ovs_interfaceid": "c09d45df-fef7-4b7f-ac2b-cea270301ba4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.662245] env[68437]: DEBUG oslo_vmware.api [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944176, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.816324] env[68437]: DEBUG nova.network.neutron [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updated VIF entry in instance network info cache for port 7bfe6ebf-7a19-4548-afbe-c423b4ceb680. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 862.816982] env[68437]: DEBUG nova.network.neutron [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7bfe6ebf-7a19-4548-afbe-c423b4ceb680", "address": "fa:16:3e:a1:9a:22", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7bfe6ebf-7a", "ovs_interfaceid": "7bfe6ebf-7a19-4548-afbe-c423b4ceb680", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.828397] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944175, 'name': ReconfigVM_Task, 'duration_secs': 1.285095} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.829316] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Reconfigured VM instance instance-00000032 to attach disk [datastore1] fc62ff9d-1bd8-4b32-9e71-41410276802d/fc62ff9d-1bd8-4b32-9e71-41410276802d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 862.829955] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55ab2515-2eaf-4a0f-9567-8b127490e6fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.836717] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 862.836717] env[68437]: value = "task-2944177" [ 862.836717] env[68437]: _type = "Task" [ 862.836717] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.845349] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944177, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.897189] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.897986] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.900530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.098s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.900530] env[68437]: DEBUG nova.objects.instance [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lazy-loading 'resources' on Instance uuid f1230046-d368-40ee-b1fa-99df4ab15a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.956343] env[68437]: DEBUG oslo_concurrency.lockutils [req-1d0653e3-e31d-437c-844c-e32facab14c4 req-5fdf9855-34cd-44ea-98a6-a601f344cbeb service nova] Releasing lock "refresh_cache-39c532b1-b05e-4354-ad8f-9223b06e9488" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.159783] env[68437]: DEBUG oslo_vmware.api [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944176, 'name': ReconfigVM_Task, 'duration_secs': 0.691666} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.160311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.160534] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfigured VM to attach interface {{(pid=68437) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 863.323296] env[68437]: DEBUG oslo_concurrency.lockutils [req-6016e4eb-1ace-4f8e-bf25-2f1d5565d496 req-6aeb37ee-f2e1-4d73-81e7-6b6be37f472a service nova] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.349024] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944177, 'name': Rename_Task, 'duration_secs': 0.1526} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.349024] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 863.349024] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a57e7c79-45ba-4e9f-978c-3c386685f19c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.354441] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 863.354441] env[68437]: value = "task-2944178" [ 863.354441] env[68437]: _type = "Task" [ 863.354441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.366162] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.406279] env[68437]: DEBUG nova.compute.utils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 863.412071] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 863.412425] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 863.503072] env[68437]: DEBUG nova.policy [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5f45026cc544e518c18567213ee1765', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da9ab48f0bb046118f57da53a31c3a68', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 863.512522] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Successfully updated port: 7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.666621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-006c6896-7e93-43d9-b17c-44566d6e8251 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.115s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.848131] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Successfully created port: e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.871303] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944178, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.915182] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.027831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.027997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.028533] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 864.050824] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ba91e3-2347-4b2b-b5e4-52dac3ad859b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.059070] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a845b3c-4785-4449-b5f4-678150cb3350 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.095526] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240455a0-3977-4b6c-95b9-f8cec2eb2872 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.103255] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40567930-c079-4379-9fde-888b7a571b52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.116840] env[68437]: DEBUG nova.compute.provider_tree [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.121812] env[68437]: DEBUG nova.compute.manager [req-0e137917-87f0-4250-870f-95c8c1db4ccf req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Received event network-vif-plugged-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 864.123396] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e137917-87f0-4250-870f-95c8c1db4ccf req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] Acquiring lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.123654] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e137917-87f0-4250-870f-95c8c1db4ccf 
req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.123834] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e137917-87f0-4250-870f-95c8c1db4ccf req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.124035] env[68437]: DEBUG nova.compute.manager [req-0e137917-87f0-4250-870f-95c8c1db4ccf req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] No waiting events found dispatching network-vif-plugged-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 864.124235] env[68437]: WARNING nova.compute.manager [req-0e137917-87f0-4250-870f-95c8c1db4ccf req-f3e5c68c-20fa-46f6-ae33-b73cac0cfe04 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Received unexpected event network-vif-plugged-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 for instance with vm_state building and task_state spawning. [ 864.368520] env[68437]: DEBUG oslo_vmware.api [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944178, 'name': PowerOnVM_Task, 'duration_secs': 0.595317} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.368831] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 864.369493] env[68437]: INFO nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Took 9.10 seconds to spawn the instance on the hypervisor. [ 864.369493] env[68437]: DEBUG nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 864.370118] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b2d0a4-bfe8-4a5f-8f0f-e2dee8dc1f4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.581814] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 864.625630] env[68437]: DEBUG nova.scheduler.client.report [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.726083] env[68437]: DEBUG nova.network.neutron [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Updating instance_info_cache with network_info: [{"id": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "address": "fa:16:3e:46:29:ca", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0fc670-7d", "ovs_interfaceid": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.891588] env[68437]: INFO nova.compute.manager [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Took 41.45 seconds to build instance. [ 864.926395] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.952771] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 864.953035] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.953354] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 864.953418] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.953571] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 864.953726] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 864.953931] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 864.954212] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 864.954407] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 864.954473] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 864.955170] env[68437]: DEBUG nova.virt.hardware [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 864.955506] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b755254f-8329-4bdf-84b0-e138da09cefb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.964200] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29a514b-9f28-4139-89d9-24ea355335fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.138128] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.235s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.138497] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.565s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.140782] env[68437]: INFO nova.compute.claims [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.167588] env[68437]: INFO nova.scheduler.client.report [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted allocations for instance f1230046-d368-40ee-b1fa-99df4ab15a10 [ 865.229375] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.229863] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Instance network_info: |[{"id": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "address": "fa:16:3e:46:29:ca", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0fc670-7d", "ovs_interfaceid": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 865.230538] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:29:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a0fc670-7d4b-404c-bd1b-73f31da1c3d4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.240426] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.240741] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.240978] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57cb0cab-37ac-432c-a74c-fa1dcbcefd93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.264061] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.264061] env[68437]: value = "task-2944179" [ 865.264061] env[68437]: _type = "Task" [ 865.264061] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.274144] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944179, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.395562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f8551b4b-5a8d-4f88-bcc0-3e9772e0d0b7 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.497s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.582301] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Successfully updated port: e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 865.617125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.617445] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.617659] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.617915] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.618126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.621076] env[68437]: INFO 
nova.compute.manager [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Terminating instance [ 865.674863] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31c303a2-6de9-49cd-b5af-4b6447a38dd0 tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "f1230046-d368-40ee-b1fa-99df4ab15a10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.536s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.773939] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944179, 'name': CreateVM_Task, 'duration_secs': 0.337569} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.774319] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.775033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.775220] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.775530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 865.775787] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3afda73b-958e-40bf-916e-862060a7337f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.781135] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 865.781135] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520cfe2f-7503-ec4d-be8c-c47c642016d8" [ 865.781135] env[68437]: _type = "Task" [ 865.781135] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.789388] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cfe2f-7503-ec4d-be8c-c47c642016d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.823571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.823839] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.824128] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.824333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.824514] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.826723] env[68437]: INFO nova.compute.manager [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Terminating instance [ 865.902101] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 865.904928] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-7bfe6ebf-7a19-4548-afbe-c423b4ceb680" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.905178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-7bfe6ebf-7a19-4548-afbe-c423b4ceb680" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.086010] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.086372] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquired lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.086372] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 866.125070] env[68437]: DEBUG nova.compute.manager [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.125317] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.126349] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832ce0a6-f41c-415f-b052-3c3e7a39d0c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.134787] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.135705] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46b9b889-79c0-4055-aec5-24b1bbf208e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.143668] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 866.143668] env[68437]: value = "task-2944180" [ 866.143668] env[68437]: _type = "Task" [ 866.143668] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.158175] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.162179] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Received event network-changed-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 866.162301] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Refreshing instance network info cache due to event network-changed-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 866.162634] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Acquiring lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.162634] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Acquired lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.162790] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Refreshing network info cache for port 7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 866.299696] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cfe2f-7503-ec4d-be8c-c47c642016d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010041} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.302807] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.303073] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.303317] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.303475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.303663] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.304731] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b39d7823-8731-4d47-b1de-c85324c56ff6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.330649] env[68437]: DEBUG nova.compute.manager [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.330787] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.332909] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479ffb7a-3f91-4b78-b601-b9b5a615ab27 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.335900] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.336158] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.337150] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b4537a-e3af-45a4-8c12-f89d239536bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.347120] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 866.347120] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529bcc5b-1d40-8269-cdcc-a07c09b5556f" [ 866.347120] env[68437]: _type = "Task" [ 866.347120] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.347270] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.347446] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-349e8051-a30a-4743-b9a9-d8b3f758b2f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.361049] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529bcc5b-1d40-8269-cdcc-a07c09b5556f, 'name': SearchDatastore_Task, 'duration_secs': 0.010401} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.361369] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 866.361369] env[68437]: value = "task-2944181" [ 866.361369] env[68437]: _type = "Task" [ 866.361369] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.368631] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d5f4d9-d050-43ee-8339-534080811821 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.379959] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 866.379959] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52038776-96e0-d237-b434-12a764fe1400" [ 866.379959] env[68437]: _type = "Task" [ 866.379959] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.382869] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944181, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.395861] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52038776-96e0-d237-b434-12a764fe1400, 'name': SearchDatastore_Task, 'duration_secs': 0.010189} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.397353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.397655] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ba0d8067-a617-4910-b2f6-33a7be461f8e/ba0d8067-a617-4910-b2f6-33a7be461f8e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.400140] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b06d4afc-ff19-46e8-82ed-f4a9bae1eb16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.409939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.409939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.414570] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb50d84-ca44-4834-9766-0851bbcac5dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.418284] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 866.418284] env[68437]: value = "task-2944182" [ 866.418284] env[68437]: _type = "Task" [ 866.418284] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.442599] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.443685] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ac2606-07ef-42a2-9092-7284eba5af2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.451474] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.476042] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfiguring VM to detach interface {{(pid=68437) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 866.478923] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48c7407a-16ee-4a58-ab5a-c0b40c57e271 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.499644] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 866.499644] env[68437]: value = "task-2944183" [ 866.499644] env[68437]: _type = "Task" [ 866.499644] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.508053] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.659939] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944180, 'name': PowerOffVM_Task, 'duration_secs': 0.221053} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.662054] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.662277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.662825] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2201e89a-15cc-4473-8a50-59be3889fae1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.688440] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 866.737117] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.737872] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.737872] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleting the datastore file [datastore2] ed1a81fd-dd4b-4126-96de-3c3f67cdca31 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.738094] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f09fdc45-53df-41c1-b774-5e5972d9ce05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.751977] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 866.751977] env[68437]: value = "task-2944185" [ 866.751977] env[68437]: _type = "Task" [ 866.751977] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.765781] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944185, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.841377] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446718f1-a085-4903-a3d6-a7547bc915b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.851726] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b231059-4fbc-43ec-b037-f3987d9d7772 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.892098] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db41d98-5bfa-4389-9677-065d9043c923 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.908278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.908278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.908278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.908278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.908278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.908278] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944181, 'name': PowerOffVM_Task, 'duration_secs': 0.206772} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.908278] env[68437]: INFO nova.compute.manager [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Terminating instance [ 866.911508] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f905f2-baf3-49fa-92eb-c355f5066b02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.916632] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.916632] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.919875] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d61e32b8-39c9-420d-acd3-131a8f219c8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.933700] env[68437]: DEBUG nova.compute.provider_tree [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.945097] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944182, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49331} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.949025] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ba0d8067-a617-4910-b2f6-33a7be461f8e/ba0d8067-a617-4910-b2f6-33a7be461f8e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.949025] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.949025] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-347edf30-b4d1-4e9d-a648-d59fefe0f4b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.953596] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 866.953596] env[68437]: value = "task-2944187" [ 866.953596] env[68437]: _type = "Task" [ 866.953596] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.964761] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944187, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.987255] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.987537] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.987770] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleting the datastore file [datastore1] fc62ff9d-1bd8-4b32-9e71-41410276802d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.988074] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3adeb9f-83aa-4dd2-a26f-ea9856c23fe0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.994760] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for the task: (returnval){ [ 866.994760] env[68437]: value = "task-2944188" [ 866.994760] env[68437]: _type = "Task" [ 866.994760] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.003043] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944188, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.020298] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.055660] env[68437]: DEBUG nova.network.neutron [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Updating instance_info_cache with network_info: [{"id": "e418a4fd-833a-491d-a41e-250b7b2fe338", "address": "fa:16:3e:f1:ef:1d", "network": {"id": "7dab156b-849b-4180-934d-1bea140a05d4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-593068485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da9ab48f0bb046118f57da53a31c3a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape418a4fd-83", "ovs_interfaceid": "e418a4fd-833a-491d-a41e-250b7b2fe338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.100564] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Updated VIF entry in instance network info cache for port 7a0fc670-7d4b-404c-bd1b-73f31da1c3d4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 867.101048] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Updating instance_info_cache with network_info: [{"id": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "address": "fa:16:3e:46:29:ca", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0fc670-7d", "ovs_interfaceid": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.262966] env[68437]: DEBUG oslo_vmware.api [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26984} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.263284] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.263474] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.263656] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.263834] env[68437]: INFO nova.compute.manager [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Took 1.14 seconds to destroy the instance on the hypervisor. 
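The CreateVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same shape: oslo_vmware submits a vCenter task and then wait_for_task/_poll_task re-polls it, logging "progress is N%." until the task reports "completed successfully". The following is a minimal, self-contained Python sketch of that submit-and-poll loop only; FakeTask, its progress steps and the poll interval are illustrative stand-ins and not the real oslo.vmware task objects.

```python
# Illustrative sketch of the submit-and-poll pattern recorded by the
# wait_for_task / _poll_task log lines above. FakeTask is a stand-in for a
# vCenter task handle; real code reads TaskInfo through the vSphere API.
import itertools
import time


class FakeTask:
    """Fake task that advances toward completion each time it is polled."""

    def __init__(self, name, steps=3):
        self.name = name
        self._progress = itertools.chain(range(0, 100, 100 // steps), [100])

    def poll(self):
        progress = next(self._progress, 100)
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.5):
    """Block until the task completes, logging progress like _poll_task does."""
    while True:
        state, progress = task.poll()
        print(f"Task: {task.name} progress is {progress}%.")
        if state == "success":
            print(f"Task: {task.name} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("CreateVM_Task"), poll_interval=0.1)
```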
[ 867.264094] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.264292] env[68437]: DEBUG nova.compute.manager [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.264678] env[68437]: DEBUG nova.network.neutron [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 867.423023] env[68437]: DEBUG nova.compute.manager [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 867.423023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.423023] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687b2529-decf-4a68-8f71-3ca8c6101702 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.431478] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.431858] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e76b27b5-0392-4711-ba1b-410feca2293d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.440017] env[68437]: DEBUG nova.scheduler.client.report [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.444285] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 
867.444285] env[68437]: value = "task-2944189" [ 867.444285] env[68437]: _type = "Task" [ 867.444285] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.455821] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.465789] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068691} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.469996] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.469996] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e675e6-6d99-4a41-9483-9eda73907f5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.492069] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] ba0d8067-a617-4910-b2f6-33a7be461f8e/ba0d8067-a617-4910-b2f6-33a7be461f8e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.492726] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0ae1046-cea9-4269-95e6-05e2e09bfdac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.526204] env[68437]: DEBUG oslo_vmware.api [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Task: {'id': task-2944188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155926} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.529444] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.529582] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.529730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.529938] env[68437]: INFO nova.compute.manager [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Took 1.20 seconds to destroy the instance on the hypervisor. [ 867.531285] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.531285] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 867.531285] env[68437]: value = "task-2944190" [ 867.531285] env[68437]: _type = "Task" [ 867.531285] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.531285] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.531285] env[68437]: DEBUG nova.compute.manager [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.531285] env[68437]: DEBUG nova.network.neutron [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 867.543033] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944190, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.559535] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Releasing lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.560064] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Instance network_info: |[{"id": "e418a4fd-833a-491d-a41e-250b7b2fe338", "address": "fa:16:3e:f1:ef:1d", "network": {"id": "7dab156b-849b-4180-934d-1bea140a05d4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-593068485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da9ab48f0bb046118f57da53a31c3a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape418a4fd-83", "ovs_interfaceid": "e418a4fd-833a-491d-a41e-250b7b2fe338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.560531] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:ef:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e418a4fd-833a-491d-a41e-250b7b2fe338', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.572198] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Creating folder: Project (da9ab48f0bb046118f57da53a31c3a68). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.573128] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf4b710f-3149-45dc-bc72-2258ead7cf46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.583378] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Created folder: Project (da9ab48f0bb046118f57da53a31c3a68) in parent group-v590848. [ 867.584015] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Creating folder: Instances. Parent ref: group-v590985. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.587312] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47a2cae0-7b85-4c3a-93df-91e1df79f856 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.598023] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Created folder: Instances in parent group-v590985. [ 867.598854] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.598854] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.598854] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb460af6-b55c-4820-9842-8fe7ead209ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.614874] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Releasing lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.615206] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Received event network-vif-plugged-e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 867.615433] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Acquiring lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.615657] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.615837] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.616029] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] No waiting events found dispatching network-vif-plugged-e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.616739] env[68437]: WARNING nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Received unexpected event network-vif-plugged-e418a4fd-833a-491d-a41e-250b7b2fe338 for instance with vm_state building and task_state spawning. 
[ 867.616739] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Received event network-changed-e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 867.616739] env[68437]: DEBUG nova.compute.manager [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Refreshing instance network info cache due to event network-changed-e418a4fd-833a-491d-a41e-250b7b2fe338. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 867.616739] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Acquiring lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.617641] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Acquired lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.617641] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Refreshing network info cache for port e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 867.622728] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.622728] env[68437]: value = "task-2944193" [ 867.622728] env[68437]: _type = "Task" [ 867.622728] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.632309] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944193, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.947731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.809s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.948328] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 867.951422] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.536s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.956109] env[68437]: INFO nova.compute.claims [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.966137] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944189, 'name': PowerOffVM_Task, 'duration_secs': 0.301982} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.966442] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 867.966605] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.966861] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b24c3979-8626-41b3-9edc-8b4b12dd3130 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.022925] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.026986] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.027219] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.027407] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleting the datastore file [datastore2] 9a7c248f-5262-4f03-aace-f22c4976bb0f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.027659] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b34a275-f149-4dc2-b3e0-604dca2edb8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.034190] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for the task: (returnval){ [ 868.034190] env[68437]: value = "task-2944195" [ 868.034190] env[68437]: _type = "Task" [ 868.034190] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.045730] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944190, 'name': ReconfigVM_Task, 'duration_secs': 0.366727} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.051191] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Reconfigured VM instance instance-00000033 to attach disk [datastore1] ba0d8067-a617-4910-b2f6-33a7be461f8e/ba0d8067-a617-4910-b2f6-33a7be461f8e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.051191] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944195, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.051191] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1cd6c73f-0f95-45d7-9840-e9966aa9bfac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.056465] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 868.056465] env[68437]: value = "task-2944196" [ 868.056465] env[68437]: _type = "Task" [ 868.056465] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.064488] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944196, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.136982] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944193, 'name': CreateVM_Task, 'duration_secs': 0.431025} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.140050] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.141328] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.141559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.141969] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.142291] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3362caf8-d04b-4f3e-aea4-66a16c5a1c1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.149773] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 868.149773] env[68437]: value = 
"session[52d18e23-934d-b881-b17b-a9e1dee55268]52e98463-3e19-cfb9-a42e-25fa2bee01da" [ 868.149773] env[68437]: _type = "Task" [ 868.149773] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.161730] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e98463-3e19-cfb9-a42e-25fa2bee01da, 'name': SearchDatastore_Task, 'duration_secs': 0.009838} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.165721] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.165968] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.166282] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.166456] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.166566] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.166858] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e75ef40-5f4c-47d9-a83d-43252fab6389 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.177339] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.177541] env[68437]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.178358] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18b5b452-c56c-486c-a604-4a055048f937 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.184553] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 868.184553] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524451eb-be34-6f21-6025-be72c8273760" [ 868.184553] env[68437]: _type = "Task" [ 868.184553] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.193047] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524451eb-be34-6f21-6025-be72c8273760, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.218859] env[68437]: DEBUG nova.network.neutron [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.247853] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.248218] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.348488] env[68437]: DEBUG nova.network.neutron [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.462322] env[68437]: DEBUG nova.compute.utils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 868.468576] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 
tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 868.468781] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 868.527277] env[68437]: DEBUG nova.compute.manager [req-5bfb5cfd-827d-4207-96ac-76121cf1c833 req-c8ce1d47-64b7-4f64-8b05-2b8fc0783121 service nova] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Received event network-vif-deleted-b47860d3-efcd-4110-8153-6bc15940189c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 868.527277] env[68437]: DEBUG nova.compute.manager [req-5bfb5cfd-827d-4207-96ac-76121cf1c833 req-c8ce1d47-64b7-4f64-8b05-2b8fc0783121 service nova] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Received event network-vif-deleted-dcc5d60a-dad1-435b-8b40-7f436bfd157b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 868.533416] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.538185] env[68437]: DEBUG nova.policy [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b57f48bbefbe436fb84cf86752a7cfab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03cd64940cc64e7baceabbc7983889df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 868.549589] env[68437]: DEBUG oslo_vmware.api [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Task: {'id': task-2944195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149626} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.549882] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.550056] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.550241] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.550415] env[68437]: INFO nova.compute.manager [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 868.550654] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 868.550849] env[68437]: DEBUG nova.compute.manager [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 868.550944] env[68437]: DEBUG nova.network.neutron [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 868.565156] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944196, 'name': Rename_Task, 'duration_secs': 0.15215} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.565805] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.565805] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bf817f7-d2e3-4941-b2e8-ed51e8b02513 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.571867] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 868.571867] env[68437]: value = "task-2944197" [ 868.571867] env[68437]: _type = "Task" [ 868.571867] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.579855] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944197, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.609202] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "4254002c-d292-4f10-a3d0-387853dbbcb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.609426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.658420] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Updated VIF entry in instance network info cache for port e418a4fd-833a-491d-a41e-250b7b2fe338. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 868.658791] env[68437]: DEBUG nova.network.neutron [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Updating instance_info_cache with network_info: [{"id": "e418a4fd-833a-491d-a41e-250b7b2fe338", "address": "fa:16:3e:f1:ef:1d", "network": {"id": "7dab156b-849b-4180-934d-1bea140a05d4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-593068485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da9ab48f0bb046118f57da53a31c3a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape418a4fd-83", "ovs_interfaceid": "e418a4fd-833a-491d-a41e-250b7b2fe338", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.695927] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524451eb-be34-6f21-6025-be72c8273760, 'name': SearchDatastore_Task, 'duration_secs': 0.013943} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.696731] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c30d5da-cd12-43c0-b890-8f6d45ff47d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.702980] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 868.702980] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52388fe1-8365-30e1-4f7d-b029df7c24ce" [ 868.702980] env[68437]: _type = "Task" [ 868.702980] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.712899] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52388fe1-8365-30e1-4f7d-b029df7c24ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.721539] env[68437]: INFO nova.compute.manager [-] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Took 1.46 seconds to deallocate network for instance. 
[ 868.850644] env[68437]: INFO nova.compute.manager [-] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Took 1.32 seconds to deallocate network for instance. [ 868.887141] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Successfully created port: b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.903054] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.903054] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.969810] env[68437]: DEBUG nova.compute.utils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.032646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.033091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.033417] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.053370] env[68437]: DEBUG nova.compute.manager [req-19e42c72-4d06-4d04-931a-9c980d7d710f req-542bb709-e5f5-4966-8812-f64627e43c32 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Received event network-vif-deleted-ed35f15a-aaef-467f-9f0a-437e412e5bb4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.053370] env[68437]: INFO nova.compute.manager [req-19e42c72-4d06-4d04-931a-9c980d7d710f req-542bb709-e5f5-4966-8812-f64627e43c32 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Neutron deleted interface ed35f15a-aaef-467f-9f0a-437e412e5bb4; detaching it from the instance and deleting it from the info cache [ 869.053370] env[68437]: DEBUG nova.network.neutron [req-19e42c72-4d06-4d04-931a-9c980d7d710f req-542bb709-e5f5-4966-8812-f64627e43c32 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.088662] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944197, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.161893] env[68437]: DEBUG oslo_concurrency.lockutils [req-b424add1-428c-43d7-8721-73ebcb54eed2 req-797a9a37-db77-4399-9e40-9e27a6638051 service nova] Releasing lock "refresh_cache-2a0772bf-ce23-4579-9bea-7e706a80cd4d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.215008] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52388fe1-8365-30e1-4f7d-b029df7c24ce, 'name': SearchDatastore_Task, 'duration_secs': 0.030335} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.215301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.215641] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2a0772bf-ce23-4579-9bea-7e706a80cd4d/2a0772bf-ce23-4579-9bea-7e706a80cd4d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.215918] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9970cd8a-ab63-4aab-8a9a-093b9b4a6406 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.222212] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 869.222212] env[68437]: value = "task-2944198" [ 869.222212] env[68437]: _type = "Task" [ 869.222212] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.228072] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.231076] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944198, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.363755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.415209] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.415463] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.416105] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.416105] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.416105] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.416105] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.416533] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 869.416533] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.434298] env[68437]: DEBUG nova.network.neutron [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.472271] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.524296] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b75615-91ec-4b61-ae34-90f0304c63b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.533126] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.535970] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d774b32-970f-49db-a13e-1335d5535098 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.566415] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-199e8051-b27b-4833-b415-e7bb19d3a251 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.569034] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b190df9-c6c0-4233-a6a2-bdf58a43d9e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.579495] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845ee43a-5323-436b-9ae2-fa39d64d6a79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.587764] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac77b6c0-65e3-414d-95ec-a8e043e0bad1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.601949] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944197, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.614406] env[68437]: DEBUG nova.compute.provider_tree [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.630379] env[68437]: DEBUG nova.compute.manager [req-19e42c72-4d06-4d04-931a-9c980d7d710f req-542bb709-e5f5-4966-8812-f64627e43c32 service nova] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Detach interface failed, port_id=ed35f15a-aaef-467f-9f0a-437e412e5bb4, reason: Instance 9a7c248f-5262-4f03-aace-f22c4976bb0f could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 869.733034] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488924} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.733325] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2a0772bf-ce23-4579-9bea-7e706a80cd4d/2a0772bf-ce23-4579-9bea-7e706a80cd4d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.733548] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.733808] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b5b4385-7f61-4cd3-92f5-59c4e0da6930 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.741507] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 869.741507] env[68437]: value = "task-2944199" [ 869.741507] env[68437]: _type = "Task" [ 869.741507] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.748574] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.918942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.936716] env[68437]: INFO nova.compute.manager [-] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Took 1.39 seconds to deallocate network for instance. [ 870.029304] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.085867] env[68437]: DEBUG oslo_vmware.api [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944197, 'name': PowerOnVM_Task, 'duration_secs': 1.495678} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.086155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.086366] env[68437]: INFO nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Took 7.77 seconds to spawn the instance on the hypervisor. [ 870.086594] env[68437]: DEBUG nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 870.087407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc5eef0-ab16-49fd-8896-2c9f0230f597 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.132125] env[68437]: DEBUG nova.scheduler.client.report [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.251692] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065154} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.251958] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 870.252730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d8ab0d-52e0-4a17-bcba-da55bd84bcb4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.273954] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 2a0772bf-ce23-4579-9bea-7e706a80cd4d/2a0772bf-ce23-4579-9bea-7e706a80cd4d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 870.274225] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e9940ec-d926-4187-a491-ea5e602ac2be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.293734] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 870.293734] env[68437]: value = "task-2944200" [ 870.293734] env[68437]: _type = "Task" [ 870.293734] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.301466] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944200, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.376814] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Successfully updated port: b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 870.442992] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.481880] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.507180] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:38:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='559911056',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1372055635',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.507443] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.507606] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.507791] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.508028] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d 
tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.508217] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.508431] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.508622] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.508790] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.508951] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.509139] env[68437]: DEBUG nova.virt.hardware [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.509969] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018abefd-cf16-4f74-ba9e-7c7efdb35609 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.517672] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c08dc68-087a-44c6-ad15-1a7481066d74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.536760] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.572810] env[68437]: DEBUG nova.compute.manager [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Received event network-vif-plugged-b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 870.573039] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Acquiring lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.573257] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.573419] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.573588] env[68437]: DEBUG nova.compute.manager [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] No waiting events found dispatching network-vif-plugged-b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 870.573756] env[68437]: WARNING nova.compute.manager [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Received unexpected event network-vif-plugged-b15f4c4a-122d-4231-be11-a7d9e18e59ed for instance with vm_state building and task_state spawning. [ 870.573918] env[68437]: DEBUG nova.compute.manager [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Received event network-changed-b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 870.574089] env[68437]: DEBUG nova.compute.manager [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Refreshing instance network info cache due to event network-changed-b15f4c4a-122d-4231-be11-a7d9e18e59ed. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 870.574273] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Acquiring lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.574408] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Acquired lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.574566] env[68437]: DEBUG nova.network.neutron [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Refreshing network info cache for port b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 870.603987] env[68437]: INFO nova.compute.manager [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Took 43.28 seconds to build instance. [ 870.638244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.638244] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 870.640367] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.641446] env[68437]: DEBUG nova.objects.instance [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lazy-loading 'resources' on Instance uuid 45595615-59c0-4c59-b18c-b49a3126dbb7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.805406] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944200, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.879723] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.031251] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.105491] env[68437]: DEBUG nova.network.neutron [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 871.107542] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bb26a4a5-74b0-4f75-b37e-a93f2ff1b248 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.506s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.144031] env[68437]: DEBUG nova.compute.utils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.148618] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 871.195751] env[68437]: DEBUG nova.network.neutron [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.307355] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944200, 'name': ReconfigVM_Task, 'duration_secs': 0.80428} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.307640] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 2a0772bf-ce23-4579-9bea-7e706a80cd4d/2a0772bf-ce23-4579-9bea-7e706a80cd4d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.308280] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f21a2dcc-de67-4aac-ae42-2bd0903dd436 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.317837] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 871.317837] env[68437]: value = "task-2944201" [ 871.317837] env[68437]: _type = "Task" [ 871.317837] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.328078] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944201, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.487350] env[68437]: DEBUG oslo_concurrency.lockutils [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] Acquiring lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.487408] env[68437]: DEBUG oslo_concurrency.lockutils [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] Acquired lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.487591] env[68437]: DEBUG nova.network.neutron [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 871.533771] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.584245] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928ca1cf-d945-45d2-a24f-25a2838f9295 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.591796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01158c27-bf48-4bd4-a427-c954f91d6acf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.620098] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.623472] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90614092-d9b1-44a5-8009-7431b4ef6e8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.630762] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbbdebc-b66b-414c-b8ce-5c7cbecba5aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.646430] env[68437]: DEBUG nova.compute.provider_tree [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.655022] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.698679] env[68437]: DEBUG oslo_concurrency.lockutils [req-83f8994e-07cc-4d88-84b0-d406f04a576a req-16efb565-2e4f-45cd-9bc8-51cad7c7a3bc service nova] Releasing lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.699058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.699255] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 871.828716] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944201, 'name': Rename_Task, 'duration_secs': 0.136367} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.830090] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 871.830090] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32d27771-620a-42c8-9748-e5e9443eac79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.837594] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 871.837594] env[68437]: value = "task-2944202" [ 871.837594] env[68437]: _type = "Task" [ 871.837594] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.846513] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.033808] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.145102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.149266] env[68437]: DEBUG nova.scheduler.client.report [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.208919] env[68437]: DEBUG nova.network.neutron [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Updating instance_info_cache with network_info: [{"id": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "address": "fa:16:3e:46:29:ca", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0fc670-7d", "ovs_interfaceid": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.243097] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 872.347028] env[68437]: DEBUG oslo_vmware.api [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944202, 'name': PowerOnVM_Task, 'duration_secs': 0.475271} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.349742] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.349972] env[68437]: INFO nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Took 7.42 seconds to spawn the instance on the hypervisor. [ 872.350191] env[68437]: DEBUG nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.350989] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe77e69-bbfc-4294-9e5b-61825f5dfabc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.533873] env[68437]: DEBUG oslo_vmware.api [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944183, 'name': ReconfigVM_Task, 'duration_secs': 5.760785} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.534582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.534582] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Reconfigured VM to detach interface {{(pid=68437) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 872.658382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.018s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.661940] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.664176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.464s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.665664] env[68437]: INFO nova.compute.claims [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 872.703754] env[68437]: INFO nova.scheduler.client.report [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Deleted allocations for instance 45595615-59c0-4c59-b18c-b49a3126dbb7 [ 872.705691] env[68437]: DEBUG nova.network.neutron [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Updating instance_info_cache with network_info: [{"id": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "address": "fa:16:3e:65:71:01", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15f4c4a-12", "ovs_interfaceid": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.713574] env[68437]: DEBUG oslo_concurrency.lockutils [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] Releasing lock "refresh_cache-ba0d8067-a617-4910-b2f6-33a7be461f8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.714024] env[68437]: DEBUG nova.compute.manager [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Inject network info {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 872.714024] env[68437]: DEBUG nova.compute.manager [None req-63f3722f-39a6-469c-b35e-568950e47fea 
tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] network_info to inject: |[{"id": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "address": "fa:16:3e:46:29:ca", "network": {"id": "6a53ff6a-4b5b-4932-8109-abdbb9b61871", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-312015810-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "188b948736f44dfa8dd9aeb258180c58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16c6ea68-9b0e-4ac0-a484-7a9a40533017", "external-id": "nsx-vlan-transportzone-384", "segmentation_id": 384, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0fc670-7d", "ovs_interfaceid": "7a0fc670-7d4b-404c-bd1b-73f31da1c3d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 872.720082] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Reconfiguring VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 872.722056] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95b5e6fd-90d2-43d7-a260-5f11652b5e48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.739277] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 872.739277] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.739277] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 
tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 872.739277] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.739680] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 872.739897] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 872.740162] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 872.740337] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 872.740510] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 872.741325] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 872.741542] env[68437]: DEBUG nova.virt.hardware [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 872.744155] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73029a2d-be08-4f9b-9616-3e226a4b0ad3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.756111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f251a68-5999-4c48-b51d-c2774c6dfb64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.761361] env[68437]: DEBUG oslo_vmware.api [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 
tempest-ServersAdminTestJSON-1472049514-project-admin] Waiting for the task: (returnval){ [ 872.761361] env[68437]: value = "task-2944203" [ 872.761361] env[68437]: _type = "Task" [ 872.761361] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.780039] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.785852] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Creating folder: Project (b414fd9a31c74289a37a727004cb82a0). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.787268] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-573e7b42-f56f-4309-8980-aaee854b70ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.793655] env[68437]: DEBUG oslo_vmware.api [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] Task: {'id': task-2944203, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.802292] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Created folder: Project (b414fd9a31c74289a37a727004cb82a0) in parent group-v590848. [ 872.802498] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Creating folder: Instances. Parent ref: group-v590988. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.804074] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-784ef037-655b-4245-ac3a-c9acc669d1ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.812193] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Created folder: Instances in parent group-v590988. [ 872.812470] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 872.812670] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.812880] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b3e3772-6163-430e-9fa9-e41789ba3675 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.834313] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.834313] env[68437]: value = "task-2944206" [ 872.834313] env[68437]: _type = "Task" [ 872.834313] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.842666] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944206, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.871118] env[68437]: INFO nova.compute.manager [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Took 44.34 seconds to build instance. [ 872.990184] env[68437]: DEBUG nova.compute.manager [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-vif-deleted-7bfe6ebf-7a19-4548-afbe-c423b4ceb680 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 872.990342] env[68437]: INFO nova.compute.manager [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Neutron deleted interface 7bfe6ebf-7a19-4548-afbe-c423b4ceb680; detaching it from the instance and deleting it from the info cache [ 872.990592] env[68437]: DEBUG nova.network.neutron [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.223203] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.225062] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance network_info: |[{"id": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "address": "fa:16:3e:65:71:01", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15f4c4a-12", "ovs_interfaceid": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.225738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c2ee9af3-27b7-4eb3-8ba0-cd64e337602e tempest-SecurityGroupsTestJSON-1133615982 tempest-SecurityGroupsTestJSON-1133615982-project-member] Lock "45595615-59c0-4c59-b18c-b49a3126dbb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.461s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.226927] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:71:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b15f4c4a-122d-4231-be11-a7d9e18e59ed', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.234709] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 873.235863] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.236596] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f789325-695f-410c-9308-7da678e05e3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.259536] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.259536] env[68437]: value = "task-2944207" [ 873.259536] env[68437]: _type = "Task" [ 873.259536] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.270679] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944207, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.274154] env[68437]: DEBUG oslo_vmware.api [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] Task: {'id': task-2944203, 'name': ReconfigVM_Task, 'duration_secs': 0.196046} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.274463] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-63f3722f-39a6-469c-b35e-568950e47fea tempest-ServersAdminTestJSON-1472049514 tempest-ServersAdminTestJSON-1472049514-project-admin] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Reconfigured VM instance to set the machine id {{(pid=68437) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 873.343158] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944206, 'name': CreateVM_Task, 'duration_secs': 0.304481} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.343319] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.343750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.343908] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.344382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 873.344476] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2da49033-5672-46bb-9a2a-e5023df4af71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.349235] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 873.349235] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5273656b-d031-e1f1-fd18-dd66dfa253b6" [ 873.349235] env[68437]: _type = "Task" [ 873.349235] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.358443] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5273656b-d031-e1f1-fd18-dd66dfa253b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.373903] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1874220d-9b21-45b9-b2f0-8643f9aff556 tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.362s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.493858] env[68437]: DEBUG oslo_concurrency.lockutils [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.493983] env[68437]: DEBUG oslo_concurrency.lockutils [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] Acquired lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.495548] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa9598f-a9c9-4d62-b180-1fcacbce6aba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.515565] env[68437]: DEBUG oslo_concurrency.lockutils [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] Releasing lock "27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.515908] env[68437]: WARNING nova.compute.manager [req-c7f97cf1-6b4d-4015-acfa-5375ba7ae4bc req-8134ff37-32d6-460c-89c9-93438faeb386 service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Detach interface failed, port_id=7bfe6ebf-7a19-4548-afbe-c423b4ceb680, reason: No device with interface-id 7bfe6ebf-7a19-4548-afbe-c423b4ceb680 exists on VM: nova.exception.NotFound: No device with interface-id 7bfe6ebf-7a19-4548-afbe-c423b4ceb680 exists on VM [ 873.605408] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.605701] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.605930] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.606135] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.606307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.608756] env[68437]: INFO nova.compute.manager [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Terminating instance [ 873.770472] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944207, 'name': CreateVM_Task, 'duration_secs': 0.334003} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.772969] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.773832] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.810716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.810946] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.811122] env[68437]: DEBUG nova.network.neutron [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 873.861752] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': 
session[52d18e23-934d-b881-b17b-a9e1dee55268]5273656b-d031-e1f1-fd18-dd66dfa253b6, 'name': SearchDatastore_Task, 'duration_secs': 0.010149} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.862075] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.862314] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.862539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.862698] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.862846] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.863155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.863453] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 873.863757] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ae68252-0fc6-41b5-a084-8133f0833b9b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.866147] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae4ce5d-8eeb-43e9-8e69-329e5d184da2 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.871716] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 873.871716] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265ed1f-8151-1d82-9148-24af24c6449c" [ 873.871716] env[68437]: _type = "Task" [ 873.871716] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.877344] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.880759] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.880965] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.885046] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c78b1de7-56f7-44cc-82be-bfed2aa48125 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.894086] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5265ed1f-8151-1d82-9148-24af24c6449c, 'name': SearchDatastore_Task, 'duration_secs': 0.009398} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.900485] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.900485] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.900485] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.903097] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 873.903097] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f67a14-72b4-38bf-544c-985270bb1f2d" [ 873.903097] env[68437]: _type = "Task" [ 873.903097] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.915710] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f67a14-72b4-38bf-544c-985270bb1f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.009224} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.916736] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f45f8ef-0521-434f-b4ad-6bf5b84f6226 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.923388] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 873.923388] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523aa3e5-f614-b69d-382a-76cb38bece29" [ 873.923388] env[68437]: _type = "Task" [ 873.923388] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.936974] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523aa3e5-f614-b69d-382a-76cb38bece29, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.937279] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.937554] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.937846] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.938064] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.938281] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1f7af3b-6b6a-4f6b-9448-991114f1c404 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.940821] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e79f73a-64dc-4907-966d-ee704922d807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.949772] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 873.949772] env[68437]: value = "task-2944208" [ 873.949772] env[68437]: _type = "Task" [ 873.949772] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.955434] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.955434] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.956180] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-049db99f-4f69-42ad-9882-2243ef14ee6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.966720] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.969641] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 873.969641] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524a19e5-df7a-1c7c-b39e-a6b8a9bd561c" [ 873.969641] env[68437]: _type = "Task" [ 873.969641] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.980631] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a19e5-df7a-1c7c-b39e-a6b8a9bd561c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.114033] env[68437]: DEBUG nova.compute.manager [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.114253] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.114970] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b88d2c8-4488-421b-a85d-b7508eb5099a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.126605] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.126829] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0debf58-cb8d-4e7d-9463-93e61244a756 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.136582] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 874.136582] env[68437]: value = "task-2944209" [ 874.136582] env[68437]: _type = "Task" [ 874.136582] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.145188] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944209, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.244937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.246204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.246204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.246204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.246204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.252742] env[68437]: INFO nova.compute.manager [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Terminating instance [ 874.305924] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bac3c1-f986-4c69-8541-3d94e2cf8ca1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.314399] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a348957f-275a-479c-97b0-f448876df924 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.353754] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7361df-6060-4f77-a8fc-7a3cb82f4b39 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.361917] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ead480-fa8b-4bf2-8a28-80b612e7bb69 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.379866] env[68437]: DEBUG nova.compute.provider_tree [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.403193] env[68437]: INFO nova.compute.manager [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Rebuilding instance [ 874.414094] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.456852] env[68437]: DEBUG nova.compute.manager [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.457173] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94707e22-f475-4f32-9384-fac9983fb0fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.463512] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480492} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.465046] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.465332] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.467312] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e35c5f74-86c0-4622-b1cb-1efc967f8574 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.479609] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 874.479609] env[68437]: value = "task-2944210" [ 874.479609] env[68437]: _type = "Task" [ 874.479609] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.488960] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a19e5-df7a-1c7c-b39e-a6b8a9bd561c, 'name': SearchDatastore_Task, 'duration_secs': 0.008339} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.490465] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-135e6df1-6a2d-4f0e-bb26-2664ce7855cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.496569] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.499545] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 874.499545] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52da3dea-4211-84d3-3bb7-94ec05489c9e" [ 874.499545] env[68437]: _type = "Task" [ 874.499545] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.507767] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52da3dea-4211-84d3-3bb7-94ec05489c9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.649281] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944209, 'name': PowerOffVM_Task, 'duration_secs': 0.308933} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.649281] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.649281] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.649281] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-149081b0-2ec1-4409-a729-f7dffa191f99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.723023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.723023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.723023] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleting the datastore file [datastore1] 27c18765-38cf-41d6-9139-9acffa94fbe6 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.723023] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5313bfb8-c1e0-4210-bd60-628ba7e79710 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.728047] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the 
task: (returnval){ [ 874.728047] env[68437]: value = "task-2944212" [ 874.728047] env[68437]: _type = "Task" [ 874.728047] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.735786] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.740322] env[68437]: DEBUG nova.network.neutron [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [{"id": "755ab792-6755-4f3f-8d83-38106672f90b", "address": "fa:16:3e:c5:b6:a0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap755ab792-67", "ovs_interfaceid": "755ab792-6755-4f3f-8d83-38106672f90b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.758195] env[68437]: DEBUG nova.compute.manager [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 874.758325] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.759450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c196de-8c31-4bcb-aceb-7abca26fd7f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.767041] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.767133] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc5b0d39-9c3a-4e66-ae88-7aa509bf1042 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.772487] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 874.772487] env[68437]: value = "task-2944213" [ 874.772487] env[68437]: _type = "Task" [ 874.772487] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.780970] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.889441] env[68437]: DEBUG nova.scheduler.client.report [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.991728] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068354} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.992372] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.993203] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3b49cf-afd2-48ab-8da6-7b6fe720f40d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.019185] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.020361] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f24b1dec-0265-49ad-9206-18bb20a01abf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.039224] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52da3dea-4211-84d3-3bb7-94ec05489c9e, 'name': SearchDatastore_Task, 'duration_secs': 0.008532} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.039716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.039986] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/53c4ca02-2bc3-4a55-9aea-0e0dd669a37c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.040263] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceec7416-8cb4-46fb-bc64-80532015585e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.044176] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 875.044176] env[68437]: value = "task-2944214" [ 875.044176] env[68437]: _type = "Task" [ 875.044176] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.049534] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 875.049534] env[68437]: value = "task-2944215" [ 875.049534] env[68437]: _type = "Task" [ 875.049534] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.055185] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944214, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.059893] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944215, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.241115] env[68437]: DEBUG oslo_vmware.api [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146693} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.241429] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.241908] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.241908] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.242033] env[68437]: INFO nova.compute.manager [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 875.242233] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.242805] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-27c18765-38cf-41d6-9139-9acffa94fbe6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.246025] env[68437]: DEBUG nova.compute.manager [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 875.246025] env[68437]: DEBUG nova.network.neutron [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 875.284281] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944213, 'name': PowerOffVM_Task, 'duration_secs': 0.256165} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.284666] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.284909] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.287699] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f31c4788-f459-488e-8293-ff2f27c41abb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.395060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.395719] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.398306] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.893s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.400333] env[68437]: INFO nova.compute.claims [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.481570] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 875.482162] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbbf57ae-024a-4d37-ad1f-dbc6ce1ffaab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.490097] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 875.490097] env[68437]: value = "task-2944217" [ 875.490097] env[68437]: _type = "Task" [ 875.490097] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.503538] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.561260] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944214, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.564977] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944215, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.750379] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffd47d49-b218-40a7-96fc-6c99864a5423 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-27c18765-38cf-41d6-9139-9acffa94fbe6-7bfe6ebf-7a19-4548-afbe-c423b4ceb680" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.844s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.809508] env[68437]: DEBUG nova.compute.manager [req-78609fb2-0feb-4302-acf8-a65f138a3037 req-1efb4c81-4d66-47b9-afd7-c06c730b278e service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Received event network-vif-deleted-755ab792-6755-4f3f-8d83-38106672f90b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 875.809699] env[68437]: INFO nova.compute.manager [req-78609fb2-0feb-4302-acf8-a65f138a3037 req-1efb4c81-4d66-47b9-afd7-c06c730b278e service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Neutron deleted interface 755ab792-6755-4f3f-8d83-38106672f90b; detaching it from the instance and deleting it from the info cache [ 875.809891] env[68437]: DEBUG nova.network.neutron [req-78609fb2-0feb-4302-acf8-a65f138a3037 req-1efb4c81-4d66-47b9-afd7-c06c730b278e service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.911731] env[68437]: DEBUG nova.compute.utils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 875.913006] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 875.913715] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 875.972105] env[68437]: DEBUG nova.policy [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0e66fd345044e92857d742c65f537ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36ec823128647758ca8047a5ebf1ae1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.005404] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944217, 'name': PowerOffVM_Task, 'duration_secs': 0.346104} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.005767] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.006059] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 876.006916] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fe927d-33b7-45eb-a9c5-43b959d2b46f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.018654] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 876.018654] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fabb68c3-e5e5-4a18-88ab-d0fa8b582258 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.054489] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944214, 'name': ReconfigVM_Task, 'duration_secs': 0.635445} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.057947] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.058616] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57559977-4294-4534-ac22-76999e8c00aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.065523] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53687} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.066847] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/53c4ca02-2bc3-4a55-9aea-0e0dd669a37c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.067110] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.067445] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 876.067445] env[68437]: value = "task-2944219" [ 876.067445] env[68437]: _type = "Task" [ 876.067445] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.068013] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dbfa938-cc93-495b-8e18-76e0305fbada {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.080018] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944219, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.080018] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 876.080018] env[68437]: value = "task-2944220" [ 876.080018] env[68437]: _type = "Task" [ 876.080018] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.088808] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944220, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.255720] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Successfully created port: bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 876.290068] env[68437]: DEBUG nova.network.neutron [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.314319] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bf1cd0d-3f19-4b71-9556-4ae7ac83ed4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.325669] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f866e2-afce-4702-b7bb-6d38f70f727c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.364494] env[68437]: DEBUG nova.compute.manager [req-78609fb2-0feb-4302-acf8-a65f138a3037 req-1efb4c81-4d66-47b9-afd7-c06c730b278e service nova] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Detach interface failed, port_id=755ab792-6755-4f3f-8d83-38106672f90b, reason: Instance 27c18765-38cf-41d6-9139-9acffa94fbe6 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 876.422691] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.456241] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.456241] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.456241] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Deleting the datastore file [datastore1] 2a0772bf-ce23-4579-9bea-7e706a80cd4d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.461433] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a1cea92-4ef5-4a73-8474-a1781ab29aa7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.471741] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for the task: (returnval){ [ 876.471741] env[68437]: value = "task-2944221" [ 876.471741] env[68437]: _type = "Task" [ 876.471741] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.473730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 876.473956] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 876.474188] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 876.479247] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fadd7718-f34b-4001-a5db-f1eb47c3b482 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.490515] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944221, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.496338] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 876.496338] env[68437]: value = "task-2944222" [ 876.496338] env[68437]: _type = "Task" [ 876.496338] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.509091] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.579909] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944219, 'name': Rename_Task, 'duration_secs': 0.147851} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.580288] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.583538] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebc2832e-c5f0-4417-bbc5-011539b7c3da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.592695] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067935} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.593986] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 876.594566] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 876.594566] env[68437]: value = "task-2944223" [ 876.594566] env[68437]: _type = "Task" [ 876.594566] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.595298] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fe7a7b-5abb-4b78-8183-7faf65b3e96e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.623413] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/53c4ca02-2bc3-4a55-9aea-0e0dd669a37c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 876.629466] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6dc26180-f8b2-45d2-994b-68891309d95c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.644559] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944223, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.649252] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 876.649252] env[68437]: value = "task-2944224" [ 876.649252] env[68437]: _type = "Task" [ 876.649252] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.657864] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944224, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.795189] env[68437]: INFO nova.compute.manager [-] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Took 1.55 seconds to deallocate network for instance. [ 876.827849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.827849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.985893] env[68437]: DEBUG oslo_vmware.api [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Task: {'id': task-2944221, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138382} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.986208] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.986394] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.986571] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.986742] env[68437]: INFO nova.compute.manager [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Took 2.23 seconds to destroy the instance on the hypervisor. [ 876.986978] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 876.987185] env[68437]: DEBUG nova.compute.manager [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.987283] env[68437]: DEBUG nova.network.neutron [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 877.006883] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156052} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.008012] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 877.008233] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 877.008441] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 877.011397] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075a6c4e-701c-43ee-bf2f-f1cd04d4847f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.018995] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8342d5b7-13e5-4974-ad7b-9fffcf328759 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.053958] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb9e5fb-1e8e-4e75-b822-fd1939818034 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.061585] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81987502-6e1e-4f3f-b737-bcacee09130b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.076136] env[68437]: DEBUG nova.compute.provider_tree [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.108487] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944223, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.160449] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944224, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.289527] env[68437]: DEBUG nova.compute.manager [req-59a450d4-9535-4f06-a9c3-b1e9aa08fdd1 req-0be8e4c4-6664-4bfd-a96c-d336d76f811f service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Received event network-vif-deleted-e418a4fd-833a-491d-a41e-250b7b2fe338 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.289527] env[68437]: INFO nova.compute.manager [req-59a450d4-9535-4f06-a9c3-b1e9aa08fdd1 req-0be8e4c4-6664-4bfd-a96c-d336d76f811f service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Neutron deleted interface e418a4fd-833a-491d-a41e-250b7b2fe338; detaching it from the instance and deleting it from the info cache [ 877.289885] env[68437]: DEBUG nova.network.neutron [req-59a450d4-9535-4f06-a9c3-b1e9aa08fdd1 req-0be8e4c4-6664-4bfd-a96c-d336d76f811f service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.306371] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.431642] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.461998] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.462501] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.462766] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.463065] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.463332] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.463596] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.464355] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.464774] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.465186] 
env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.467324] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.467324] env[68437]: DEBUG nova.virt.hardware [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.467324] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa9842b-d3a3-456d-b028-e668a5188026 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.476713] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c92fe0-2a46-45a8-923e-1378e10dd29e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.579681] env[68437]: DEBUG nova.scheduler.client.report [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 877.609493] env[68437]: DEBUG oslo_vmware.api [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944223, 'name': PowerOnVM_Task, 'duration_secs': 0.590469} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.609982] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.610393] env[68437]: INFO nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Took 4.95 seconds to spawn the instance on the hypervisor. 
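The PowerOnVM_Task records above (progress 33% -> 88% -> "completed successfully", duration 0.590469s) are emitted by oslo.vmware's task poller: wait_for_task blocks while _poll_task repeatedly reads the vCenter task state and logs its progress, which is exactly the "Task: {...} progress is N%" traffic throughout this log. A minimal sketch of that poll-until-complete pattern follows; get_task_info and TaskFailed are hypothetical stand-ins for illustration, not the oslo.vmware API itself.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls, analogous to a task_poll_interval setting

    class TaskFailed(Exception):
        """Raised when the remote task reports an error state (hypothetical)."""

    def wait_for_task(get_task_info, task_ref, poll_interval=POLL_INTERVAL):
        """Poll a vCenter-style task until it finishes.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success', 'error') and .progress (0-100),
        mirroring what the "_poll_task ... progress is N%" lines report.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed("task %s failed in state %s" % (task_ref, info.state))
            # Still running: report progress and sleep, as the DEBUG lines above do.
            print("Task %s progress is %d%%." % (task_ref, info.progress))
            time.sleep(poll_interval)

In oslo.vmware the same loop sits behind VMwareAPISession.wait_for_task, with retries and a configurable task_poll_interval, which is why every long-running vCenter operation in this log appears as a task id plus a trail of progress lines.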
[ 877.610717] env[68437]: DEBUG nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.612085] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e97f38-1c63-4d31-b9a9-af01afe910df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.663293] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944224, 'name': ReconfigVM_Task, 'duration_secs': 0.531048} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.664531] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/53c4ca02-2bc3-4a55-9aea-0e0dd669a37c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.665062] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68437) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 877.666514] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-2dd7d739-e1dd-4830-a6dc-2825cf5bb5b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.679095] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 877.679095] env[68437]: value = "task-2944225" [ 877.679095] env[68437]: _type = "Task" [ 877.679095] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.688629] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944225, 'name': CreateVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.765404] env[68437]: DEBUG nova.network.neutron [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.795135] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8319324-5754-45ad-9ed9-7d015c145fba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.804145] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcc7dda-ed1d-4b6b-930f-8a73678c5148 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.823708] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Successfully updated port: bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.843482] env[68437]: DEBUG nova.compute.manager [req-59a450d4-9535-4f06-a9c3-b1e9aa08fdd1 req-0be8e4c4-6664-4bfd-a96c-d336d76f811f service nova] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Detach interface failed, port_id=e418a4fd-833a-491d-a41e-250b7b2fe338, reason: Instance 2a0772bf-ce23-4579-9bea-7e706a80cd4d could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 877.889556] env[68437]: DEBUG nova.compute.manager [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Received event network-vif-plugged-bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.889794] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.890046] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.890225] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.890336] env[68437]: DEBUG nova.compute.manager [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] No waiting events found dispatching 
network-vif-plugged-bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 877.890483] env[68437]: WARNING nova.compute.manager [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Received unexpected event network-vif-plugged-bba0a9a2-7033-420f-baf6-f59f37b8b8b8 for instance with vm_state building and task_state spawning. [ 877.890647] env[68437]: DEBUG nova.compute.manager [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Received event network-changed-bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.890803] env[68437]: DEBUG nova.compute.manager [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Refreshing instance network info cache due to event network-changed-bba0a9a2-7033-420f-baf6-f59f37b8b8b8. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 877.890979] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Acquiring lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.891310] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Acquired lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.891488] env[68437]: DEBUG nova.network.neutron [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Refreshing network info cache for port bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 878.040070] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.040343] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.040503] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.040683] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.040828] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.040973] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.041195] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.041354] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.041521] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.041682] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.041853] env[68437]: DEBUG nova.virt.hardware [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.042730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cc226d-0971-47d1-8f71-2536389d7305 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.052232] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0c117a-2e96-4159-a1e8-25b756d0f7e0 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.065849] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:4e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a915dbf0-9e3f-41da-b43b-dd0a4225b839', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.073157] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.073394] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.073598] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e199a8f-2ed6-492c-a616-ebdd71501223 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.088230] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.088730] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.092126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.086s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.098803] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.098803] env[68437]: value = "task-2944226" [ 878.098803] env[68437]: _type = "Task" [ 878.098803] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.106438] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944226, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.129362] env[68437]: INFO nova.compute.manager [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Took 41.76 seconds to build instance. [ 878.189190] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944225, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.097569} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.189455] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68437) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 878.190227] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5ab311-4fb4-486b-9355-8498cb21e34c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.215170] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/ephemeral_0.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.215841] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-686ab15a-96f2-41c7-a92b-9ca74ecc852d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.234553] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 878.234553] env[68437]: value = "task-2944227" [ 878.234553] env[68437]: _type = "Task" [ 878.234553] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.242424] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.267178] env[68437]: INFO nova.compute.manager [-] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Took 1.28 seconds to deallocate network for instance. 
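The lock traffic in these records ("compute_resources", the per-instance build lock, "refresh_cache-<uuid>", the per-instance event lock) is oslo.concurrency's lockutils. The decorator path logs 'Acquiring lock X by Y' / 'acquired ... waited Ns' / '"released" ... held Ns' from its inner wrapper (lockutils.py:405/410/424 in the records above), while the plain context-manager path logs 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' (lockutils.py:313/316/334). A minimal sketch of both forms; the lock names echo the log, but the function body and bookkeeping are illustrative stand-ins, not Nova's resource tracker.

    from oslo_concurrency import lockutils

    _usage = {}

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, vcpus):
        # Runs with the named in-process lock held; the decorator's wrapper is
        # the "inner" function that emits the "acquired by ... waited Ns" and
        # '"released" by ... held Ns' DEBUG lines seen throughout this log.
        _usage[instance_uuid] = _usage.get(instance_uuid, 0) + vcpus

    # The same named locks can be taken explicitly; this path produces the
    # 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' variants logged
    # from lockutils.lock rather than from the decorator wrapper.
    with lockutils.lock('refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed'):
        pass  # e.g. rebuild the instance's network info cache while serialized

Because these are plain named locks, the "waited 35.086s" and "held 2.690s" figures in the records above are a direct measure of contention on shared state such as the resource tracker.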
[ 878.327449] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.433335] env[68437]: DEBUG nova.network.neutron [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 878.518385] env[68437]: DEBUG nova.network.neutron [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.597864] env[68437]: INFO nova.compute.claims [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.604117] env[68437]: DEBUG nova.compute.utils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 878.605539] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 878.607027] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 878.618459] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944226, 'name': CreateVM_Task, 'duration_secs': 0.329032} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.619311] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.620136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.620741] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.620741] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.621088] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-977968e9-feac-4031-a88a-78afd3062c52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.626356] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 878.626356] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527fa794-bbd5-d108-8e5d-6e4c8a9206a9" [ 878.626356] env[68437]: _type = "Task" [ 878.626356] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.632152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-35205b6f-3e2d-4977-ad02-1a8571f07495 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.961s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.638084] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527fa794-bbd5-d108-8e5d-6e4c8a9206a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.646951] env[68437]: DEBUG nova.policy [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1074dd1b444e45beadcccfe6671c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1c3ca0e78f472e8c127fa68ed610f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 878.746426] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944227, 'name': ReconfigVM_Task, 'duration_secs': 0.354608} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.746426] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c/ephemeral_0.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.746426] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01ddaeec-15f9-4451-ac34-7c8ebfd5c9cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.753204] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 878.753204] env[68437]: value = "task-2944228" [ 878.753204] env[68437]: _type = "Task" [ 878.753204] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.762972] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944228, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.779814] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.938604] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Successfully created port: 9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.021751] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb770b86-e39f-4d03-933b-3d7d6ab99f6c req-b8bb7fa5-e3a6-4f8a-b511-30fa2457b729 service nova] Releasing lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.021751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.021944] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 879.107480] env[68437]: INFO nova.compute.resource_tracker [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating resource usage from migration bcb29f8b-d96e-4a85-b515-47c3a6a001dc [ 879.111288] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.138098] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 879.140688] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527fa794-bbd5-d108-8e5d-6e4c8a9206a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.143430] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 879.143669] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.143923] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.144058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.144254] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.144878] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fadae92e-d4ec-44ac-8fc2-568629a5102a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.154804] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.154804] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.159077] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cb5d361-a243-46e0-af20-2780a0dacb4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.165842] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 879.165842] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5232c0e6-6f02-3451-2df0-42a946884cc0" [ 879.165842] env[68437]: _type = "Task" [ 879.165842] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.173516] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5232c0e6-6f02-3451-2df0-42a946884cc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.263834] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944228, 'name': Rename_Task, 'duration_secs': 0.223509} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.264211] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.264572] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7406cde4-9a4d-43a9-b5c0-fc3cf560109d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.272277] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 879.272277] env[68437]: value = "task-2944229" [ 879.272277] env[68437]: _type = "Task" [ 879.272277] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.281179] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944229, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.290393] env[68437]: INFO nova.compute.manager [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Rebuilding instance [ 879.334925] env[68437]: DEBUG nova.compute.manager [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.335382] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22bd563-8e95-40ab-97ae-282a006b4743 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.569958] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 879.603264] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449be791-bbac-4136-a0a2-3b5a869a7e1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.610738] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17484d8-510f-4de0-9422-0a863d5566ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.651067] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ebcb7d-1ab9-4e8a-be4a-eddee0a4655b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.661844] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eae507-ae73-4272-bba4-2fb1ba2c579b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.666419] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.677739] env[68437]: DEBUG nova.compute.provider_tree [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.684167] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5232c0e6-6f02-3451-2df0-42a946884cc0, 'name': SearchDatastore_Task, 'duration_secs': 0.02806} 
completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.684859] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9df526f7-56dd-4657-ba7c-64961c2dd5dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.692565] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 879.692565] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522e96e5-d685-fd46-7c0f-b8e81374dc57" [ 879.692565] env[68437]: _type = "Task" [ 879.692565] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.700762] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522e96e5-d685-fd46-7c0f-b8e81374dc57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.755200] env[68437]: DEBUG nova.network.neutron [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [{"id": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "address": "fa:16:3e:87:d9:eb", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba0a9a2-70", "ovs_interfaceid": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.783505] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944229, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.120852] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.148379] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.148635] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.148890] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.149188] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.149421] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.149617] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.149871] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.150028] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e 
tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.150198] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.150361] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.150572] env[68437]: DEBUG nova.virt.hardware [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.151415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82fbe12-7e14-4098-a412-adeb9908bbd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.159591] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f50614d-3bca-4f78-89d7-f2066014c65a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.181346] env[68437]: DEBUG nova.scheduler.client.report [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.202539] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522e96e5-d685-fd46-7c0f-b8e81374dc57, 'name': SearchDatastore_Task, 'duration_secs': 0.010833} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.203457] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.203723] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.203979] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fdcfa9d-d471-4728-b792-709032cfcf82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.210834] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 880.210834] env[68437]: value = "task-2944230" [ 880.210834] env[68437]: _type = "Task" [ 880.210834] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.218976] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.257431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.257758] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance network_info: |[{"id": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "address": "fa:16:3e:87:d9:eb", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba0a9a2-70", "ovs_interfaceid": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 880.258196] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:d9:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bba0a9a2-7033-420f-baf6-f59f37b8b8b8', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.265865] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.266108] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.266334] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99e0afc4-0b50-4edd-b461-84a2ea6e7b61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.290407] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944229, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.292116] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.292116] env[68437]: value = "task-2944231" [ 880.292116] env[68437]: _type = "Task" [ 880.292116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.298306] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944231, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.340679] env[68437]: DEBUG nova.compute.manager [req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Received event network-vif-plugged-9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 880.340950] env[68437]: DEBUG oslo_concurrency.lockutils [req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] Acquiring lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.341220] env[68437]: DEBUG oslo_concurrency.lockutils [req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.341271] env[68437]: DEBUG oslo_concurrency.lockutils [req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.341420] env[68437]: DEBUG nova.compute.manager [req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] No waiting events found dispatching network-vif-plugged-9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.341583] env[68437]: WARNING nova.compute.manager 
[req-d3c346ca-bcf6-42bf-80fc-35269431f1d0 req-2a17c17e-1dd6-49e1-83eb-f978f3ef4d06 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Received unexpected event network-vif-plugged-9260c00b-559c-42b0-8f99-ffca47f422d7 for instance with vm_state building and task_state spawning. [ 880.350537] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.350808] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8ad667a-8d29-49c1-8d72-45481e99c2e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.358055] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 880.358055] env[68437]: value = "task-2944232" [ 880.358055] env[68437]: _type = "Task" [ 880.358055] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.366076] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.437945] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Successfully updated port: 9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.687096] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.595s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.687302] env[68437]: INFO nova.compute.manager [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Migrating [ 880.695468] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.915s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.697075] env[68437]: INFO nova.compute.claims [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.721438] env[68437]: DEBUG oslo_vmware.api [None 
req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944230, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.793296] env[68437]: DEBUG oslo_vmware.api [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944229, 'name': PowerOnVM_Task, 'duration_secs': 1.234857} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.797196] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.797476] env[68437]: INFO nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Took 10.32 seconds to spawn the instance on the hypervisor. [ 880.797686] env[68437]: DEBUG nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.798588] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9aee31-a2a3-48a6-b48e-6e9b6ef60887 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.806281] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944231, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.866567] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944232, 'name': PowerOffVM_Task, 'duration_secs': 0.250257} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.866851] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.867098] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.867825] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782da69f-c96b-41aa-96c3-c45abda2843f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.874115] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.874333] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bec466c9-4958-442d-9834-fc36ad130760 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.897998] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.897998] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.898209] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Deleting the datastore file [datastore1] 892bf198-7d05-4995-8137-c57095c5c839 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.898452] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a08d6ba-96a0-4ce5-a623-c0afe357dae7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.904612] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 880.904612] env[68437]: value = "task-2944234" [ 880.904612] env[68437]: _type = "Task" [ 880.904612] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.912452] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.943316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.943510] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.943668] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 881.210150] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.210483] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.210678] env[68437]: DEBUG nova.network.neutron [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 881.226438] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6794} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.227163] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.227163] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.227320] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5ffb9ee-9093-4fa8-ad38-58e03fdb17b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.234568] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 881.234568] env[68437]: value = "task-2944235" [ 881.234568] env[68437]: _type = "Task" [ 881.234568] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.243076] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944235, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.303274] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944231, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.321501] env[68437]: INFO nova.compute.manager [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Took 46.79 seconds to build instance. [ 881.415728] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.486667] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 881.615186] env[68437]: DEBUG nova.network.neutron [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Updating instance_info_cache with network_info: [{"id": "9260c00b-559c-42b0-8f99-ffca47f422d7", "address": "fa:16:3e:ea:fe:62", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9260c00b-55", "ovs_interfaceid": "9260c00b-559c-42b0-8f99-ffca47f422d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.745349] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944235, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238628} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.749603] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.750609] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6af8427-3858-4111-ad01-bacf9e36bf11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.773873] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.776524] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-404ec28e-dc92-45fb-b495-9d6817eb8b89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.798603] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 881.798603] env[68437]: value = "task-2944236" [ 881.798603] env[68437]: _type = "Task" [ 881.798603] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.804672] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944231, 'name': CreateVM_Task, 'duration_secs': 1.444923} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.809223] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.809223] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.809223] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.809223] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 881.813952] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e059aab-1482-4de9-8a1c-bb6984a52cf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.816019] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944236, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.818754] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 881.818754] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524c7d21-3e24-65c9-dee9-2837bc2115c4" [ 881.818754] env[68437]: _type = "Task" [ 881.818754] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.823568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5bd61fbb-8851-4532-ad2e-c4c55755cb4d tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.975s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.827619] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524c7d21-3e24-65c9-dee9-2837bc2115c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.915124] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.567146} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.917453] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 881.917648] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 881.917820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 881.929630] env[68437]: DEBUG nova.network.neutron [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", 
"external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.119195] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.119505] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Instance network_info: |[{"id": "9260c00b-559c-42b0-8f99-ffca47f422d7", "address": "fa:16:3e:ea:fe:62", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9260c00b-55", "ovs_interfaceid": "9260c00b-559c-42b0-8f99-ffca47f422d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 882.120320] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:fe:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9260c00b-559c-42b0-8f99-ffca47f422d7', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.133806] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.134088] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.134330] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e438364e-0135-44c7-9761-3be463deed13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.161353] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.161353] env[68437]: value = "task-2944237" [ 882.161353] env[68437]: _type = "Task" [ 882.161353] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.172272] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944237, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.289696] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb700411-f9eb-4349-a8bd-0127b58ca5b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.297631] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89a5ef6-b9eb-48b5-b066-a765f5e2e4de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.309349] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944236, 'name': ReconfigVM_Task, 'duration_secs': 0.406387} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.336445] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.340774] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.341207] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9662f6e-e94a-490c-98b8-6e8683fa5504 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.349592] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb001f3-d879-4fb2-9af3-f19f571b4405 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.356041] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524c7d21-3e24-65c9-dee9-2837bc2115c4, 'name': SearchDatastore_Task, 'duration_secs': 0.045262} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.359018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.359280] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.359512] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.359655] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.359829] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.360183] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 882.360183] env[68437]: value = "task-2944238" [ 882.360183] env[68437]: _type = "Task" [ 882.360183] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.360423] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-856fe0e3-d326-474f-8935-967900151654 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.365018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b35584-b23e-497e-b4ac-b8388a454d3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.376537] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944238, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.385881] env[68437]: DEBUG nova.compute.provider_tree [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.391021] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.391021] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.391021] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baee3d15-ec2b-4bd0-a697-aafdb078240e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.396744] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 882.396744] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52315bc7-e8b3-75e3-b9e5-4a15ca3be6b5" [ 882.396744] env[68437]: _type = "Task" [ 882.396744] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.409994] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52315bc7-e8b3-75e3-b9e5-4a15ca3be6b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009788} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.410852] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bda567b-d203-4898-bee6-5cff9db438a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.415984] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 882.415984] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528bf8e1-aae3-50f4-d4e6-fa42970dce96" [ 882.415984] env[68437]: _type = "Task" [ 882.415984] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.432021] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528bf8e1-aae3-50f4-d4e6-fa42970dce96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.434084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.508898] env[68437]: DEBUG nova.compute.manager [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Received event network-changed-9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 882.509212] env[68437]: DEBUG nova.compute.manager [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Refreshing instance network info cache due to event network-changed-9260c00b-559c-42b0-8f99-ffca47f422d7. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 882.509450] env[68437]: DEBUG oslo_concurrency.lockutils [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] Acquiring lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.509610] env[68437]: DEBUG oslo_concurrency.lockutils [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] Acquired lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.509780] env[68437]: DEBUG nova.network.neutron [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Refreshing network info cache for port 9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 882.529298] env[68437]: DEBUG nova.compute.manager [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Received event network-changed-b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 882.529298] env[68437]: DEBUG nova.compute.manager [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Refreshing instance network info cache due to event network-changed-b15f4c4a-122d-4231-be11-a7d9e18e59ed. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 882.529483] env[68437]: DEBUG oslo_concurrency.lockutils [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] Acquiring lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.529624] env[68437]: DEBUG oslo_concurrency.lockutils [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] Acquired lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.529786] env[68437]: DEBUG nova.network.neutron [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Refreshing network info cache for port b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 882.672030] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944237, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.861955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.872498] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944238, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.888665] env[68437]: DEBUG nova.scheduler.client.report [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.928722] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528bf8e1-aae3-50f4-d4e6-fa42970dce96, 'name': SearchDatastore_Task, 'duration_secs': 0.010699} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.931697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.932053] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.932620] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9821b328-0340-4781-95b5-2d3b6cdc71c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.941899] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 882.941899] env[68437]: value = "task-2944239" [ 882.941899] env[68437]: _type = "Task" [ 882.941899] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.954762] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944239, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.967316] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 882.967316] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.967316] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 882.967316] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.967316] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 882.967644] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 882.967644] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 882.967786] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 882.967949] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 
tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 882.968120] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 882.968291] env[68437]: DEBUG nova.virt.hardware [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 882.969237] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1af0349-57b4-4ef6-87c3-98db63b82a3b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.979270] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e0ff3a-cd6c-4da4-bc78-a896d7a67ad0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.997296] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.000258] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.002297] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.002297] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7518ffe9-6b84-4278-8945-15edf1c9ea15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.023114] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.023114] env[68437]: value = "task-2944240" [ 883.023114] env[68437]: _type = "Task" [ 883.023114] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.035389] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944240, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.175351] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944237, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.247184] env[68437]: DEBUG nova.network.neutron [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Updated VIF entry in instance network info cache for port 9260c00b-559c-42b0-8f99-ffca47f422d7. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 883.247600] env[68437]: DEBUG nova.network.neutron [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Updating instance_info_cache with network_info: [{"id": "9260c00b-559c-42b0-8f99-ffca47f422d7", "address": "fa:16:3e:ea:fe:62", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9260c00b-55", "ovs_interfaceid": "9260c00b-559c-42b0-8f99-ffca47f422d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.376827] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944238, 'name': Rename_Task, 'duration_secs': 0.97546} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.377171] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.377447] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-948b3755-facf-4ad3-8497-61a755569f6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.386151] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 883.386151] env[68437]: value = "task-2944241" [ 883.386151] env[68437]: _type = "Task" [ 883.386151] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.395162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.396339] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944241, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.396872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.319s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.398419] env[68437]: INFO nova.compute.claims [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.413215] env[68437]: DEBUG nova.network.neutron [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Updated VIF entry in instance network info cache for port b15f4c4a-122d-4231-be11-a7d9e18e59ed. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 883.413603] env[68437]: DEBUG nova.network.neutron [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Updating instance_info_cache with network_info: [{"id": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "address": "fa:16:3e:65:71:01", "network": {"id": "4c17c5a2-048e-4021-8471-3f6519d65387", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-4738106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03cd64940cc64e7baceabbc7983889df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb15f4c4a-12", "ovs_interfaceid": "b15f4c4a-122d-4231-be11-a7d9e18e59ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.458633] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944239, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.535431] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944240, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.673752] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944237, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.754479] env[68437]: DEBUG oslo_concurrency.lockutils [req-f014f557-3be4-4e1d-8f56-311090bd1f03 req-7400f662-3191-4155-887c-a1fb3cafb0d5 service nova] Releasing lock "refresh_cache-d84c599e-29b2-45ec-a3f7-54ef85af9a3d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.896302] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944241, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.898065] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "455f06f5-cc66-447a-80b1-34f150a73d9c" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.898366] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "455f06f5-cc66-447a-80b1-34f150a73d9c" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.919360] env[68437]: DEBUG oslo_concurrency.lockutils [req-36881e3f-632d-44e5-8249-e7dfa77a1202 req-7540b51a-7406-40dd-8b8f-a9ed9fe4d279 service nova] Releasing lock "refresh_cache-53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.957307] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944239, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682965} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.957515] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.957997] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.958311] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0c62e7f9-f083-417f-8f07-b95283af9c30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.960739] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa657770-86eb-4baa-90cf-7c27c8cd07fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.978733] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 0 {{(pid=68437) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 883.983395] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 883.983395] env[68437]: value = "task-2944242" [ 883.983395] env[68437]: _type = "Task" [ 883.983395] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.991173] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.034632] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944240, 'name': CreateVM_Task, 'duration_secs': 0.699449} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.034766] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.035411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.035411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.035710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.035962] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33ea82c7-da5b-4c02-98dd-dff993709e6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.041114] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 884.041114] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5245e03e-99f9-3022-55f8-03ef16d69933" [ 884.041114] env[68437]: _type = "Task" [ 884.041114] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.048486] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5245e03e-99f9-3022-55f8-03ef16d69933, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.175198] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944237, 'name': CreateVM_Task, 'duration_secs': 1.576007} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.175382] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.176137] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.397463] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944241, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.401133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "455f06f5-cc66-447a-80b1-34f150a73d9c" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.401570] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 884.486291] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.486828] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad0c125c-edca-4caa-a345-096bc8ea75cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.499829] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073741} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.500617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.500987] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 884.500987] env[68437]: value = "task-2944243" [ 884.500987] env[68437]: _type = "Task" [ 884.500987] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.501695] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644d24e9-2276-45ce-9501-90fe2c91950a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.516987] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944243, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.535365] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.539630] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8028a38-fadf-442c-bd76-2767cbc73583 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.570733] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5245e03e-99f9-3022-55f8-03ef16d69933, 'name': SearchDatastore_Task, 'duration_secs': 0.071137} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.574929] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.575338] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.575807] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.576034] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.576244] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.577024] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 884.577024] env[68437]: value = "task-2944244" [ 884.577024] env[68437]: _type = "Task" [ 884.577024] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.578474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.578474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.578474] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b5abe2b-b624-4d0d-b884-04876110415a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.580215] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d41207-177e-4292-9d42-225ad6269ad4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.592254] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.592610] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 884.592610] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5231a9f0-844a-19f9-d19a-41942d83611d" [ 884.592610] env[68437]: _type = "Task" [ 884.592610] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.596459] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.596765] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.602610] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-273ee964-83cd-4678-8c95-f46e0c33e874 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.612077] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5231a9f0-844a-19f9-d19a-41942d83611d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.617283] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 884.617283] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5283dc7c-8a26-c6d8-7d8e-92be3a746f17" [ 884.617283] env[68437]: _type = "Task" [ 884.617283] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.626172] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5283dc7c-8a26-c6d8-7d8e-92be3a746f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.897515] env[68437]: DEBUG oslo_vmware.api [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944241, 'name': PowerOnVM_Task, 'duration_secs': 1.080201} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.899912] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.900159] env[68437]: DEBUG nova.compute.manager [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 884.901133] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cfc89c-2a4f-4f6b-927e-0a308084304e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.905814] env[68437]: DEBUG nova.compute.utils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 884.908998] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 884.909205] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 884.948322] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9157c3-9c64-4eaa-bb28-74ee8ff3132c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.952267] env[68437]: DEBUG nova.policy [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e4bbb415c64fe9ac7c423a70d7fe27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd621d13761a0416485c24e97967e1a08', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.958419] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41279560-93b1-4836-8f01-27cadde05540 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.989594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b827ffcf-33ad-4027-95f8-0e6d3fcc57a7 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.996302] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a43ea1-10d1-473b-b308-6764326a041f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.009223] env[68437]: DEBUG nova.compute.provider_tree [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.019715] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944243, 'name': PowerOffVM_Task, 'duration_secs': 0.292817} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.020825] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.021455] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 885.091947] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944244, 'name': ReconfigVM_Task, 'duration_secs': 0.35265} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.091947] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.091947] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40a3ec30-9227-4ed5-9d03-dab62f33d274 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.098018] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 885.098018] env[68437]: value = "task-2944245" [ 885.098018] env[68437]: _type = "Task" [ 885.098018] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.106522] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944245, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.109913] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5231a9f0-844a-19f9-d19a-41942d83611d, 'name': SearchDatastore_Task, 'duration_secs': 0.023639} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.110345] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.110711] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.111096] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.127399] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5283dc7c-8a26-c6d8-7d8e-92be3a746f17, 'name': SearchDatastore_Task, 'duration_secs': 0.024631} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.128597] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6993771-3039-437e-99c0-66059cefe30a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.135372] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 885.135372] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f208ed-d297-ced6-4039-4488f5f18296" [ 885.135372] env[68437]: _type = "Task" [ 885.135372] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.143279] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f208ed-d297-ced6-4039-4488f5f18296, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.359188] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Successfully created port: dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 885.416778] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 885.426444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.512723] env[68437]: DEBUG nova.scheduler.client.report [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.527730] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.527978] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 
tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.528153] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.529524] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.529738] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.529903] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.530257] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.530510] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.534020] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.534020] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.534020] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.540091] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f7e84c2-8e0c-46f9-b4fb-5aac8b4a61dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.558441] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 
tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 885.558441] env[68437]: value = "task-2944246" [ 885.558441] env[68437]: _type = "Task" [ 885.558441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.567347] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.605165] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944245, 'name': Rename_Task, 'duration_secs': 0.149286} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.605482] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.605736] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afa44346-7814-4cf5-a0a1-341de70760c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.614433] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 885.614433] env[68437]: value = "task-2944247" [ 885.614433] env[68437]: _type = "Task" [ 885.614433] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.624176] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.646451] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f208ed-d297-ced6-4039-4488f5f18296, 'name': SearchDatastore_Task, 'duration_secs': 0.048527} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.646451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.646451] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.646451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.646451] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.646451] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfbe7a98-7ee7-4804-a69e-b8a6acbc6eb4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.648439] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c2d02a1-c836-4794-90ed-4f37893bdb81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.653917] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 885.653917] env[68437]: value = "task-2944248" [ 885.653917] env[68437]: _type = "Task" [ 885.653917] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.658968] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.658968] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.659202] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6bfd565-2f76-440f-bf2a-9e335d16fd4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.664575] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944248, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.667473] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 885.667473] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5290fa47-310f-6321-90cd-0c399ccc0302" [ 885.667473] env[68437]: _type = "Task" [ 885.667473] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.675441] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290fa47-310f-6321-90cd-0c399ccc0302, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.018762] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.019702] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.023084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.095s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.023444] env[68437]: DEBUG nova.objects.instance [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lazy-loading 'resources' on Instance uuid 07d98c5c-ede8-4001-93b2-1b1d83687ca1 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.068854] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944246, 'name': ReconfigVM_Task, 'duration_secs': 0.210845} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.069257] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 886.121633] env[68437]: DEBUG oslo_vmware.api [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944247, 'name': PowerOnVM_Task, 'duration_secs': 0.490494} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.121931] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.122153] env[68437]: INFO nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Took 8.69 seconds to spawn the instance on the hypervisor. 
[ 886.122344] env[68437]: DEBUG nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.123190] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1485ce42-5186-431a-b608-d2bdfcb2c194 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.164210] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944248, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.179502] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290fa47-310f-6321-90cd-0c399ccc0302, 'name': SearchDatastore_Task, 'duration_secs': 0.008971} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.181070] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73b8345a-e33d-421a-a342-0214605cd972 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.187185] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 886.187185] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a4d0f1-84c6-57c0-a9c8-cd1a15b56c25" [ 886.187185] env[68437]: _type = "Task" [ 886.187185] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.195824] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a4d0f1-84c6-57c0-a9c8-cd1a15b56c25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.433533] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 886.453762] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 886.454116] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.454335] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 886.454570] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.454752] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 886.454934] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 886.455195] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 886.455399] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 886.455620] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 
tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 886.455819] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 886.456034] env[68437]: DEBUG nova.virt.hardware [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 886.457069] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93440d7-378c-4409-929f-d5265b3ddc59 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.466933] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9451fe06-4f85-4f85-ad7e-c75e865c36db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.528031] env[68437]: DEBUG nova.compute.utils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.528564] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.528736] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 886.574645] env[68437]: DEBUG nova.policy [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57e5f7450f014895aed4b546c1ec5d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e774b300870a495ca1cc652d8920d32a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.578009] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:39:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d926657d-4f9c-4856-bc8e-d77db590d34a',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1416034897',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 886.578009] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.578178] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 886.578566] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.578566] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 886.578566] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 
tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 886.578778] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 886.579241] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 886.579241] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 886.579381] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 886.579656] env[68437]: DEBUG nova.virt.hardware [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 886.585571] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 886.588650] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4a4f39f-18e5-4553-9750-f37d1dde9a0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.610766] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 886.610766] env[68437]: value = "task-2944249" [ 886.610766] env[68437]: _type = "Task" [ 886.610766] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.621969] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944249, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.645740] env[68437]: INFO nova.compute.manager [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Took 49.46 seconds to build instance. [ 886.667158] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944248, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553273} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.670444] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.670444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.670444] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1ece78a-38c8-4b10-9475-070016d4f2ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.677412] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 886.677412] env[68437]: value = "task-2944250" [ 886.677412] env[68437]: _type = "Task" [ 886.677412] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.688836] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944250, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.700132] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a4d0f1-84c6-57c0-a9c8-cd1a15b56c25, 'name': SearchDatastore_Task, 'duration_secs': 0.01703} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.700516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.700836] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] d84c599e-29b2-45ec-a3f7-54ef85af9a3d/d84c599e-29b2-45ec-a3f7-54ef85af9a3d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.701154] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e701fb6-148a-4017-814a-242025ae599a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.707444] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 886.707444] env[68437]: value = "task-2944251" [ 886.707444] env[68437]: _type = "Task" [ 886.707444] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.714902] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944251, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.720944] env[68437]: INFO nova.compute.manager [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Rebuilding instance [ 886.760526] env[68437]: DEBUG nova.compute.manager [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.761457] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccd5747-44ae-4a96-8277-bcb078b3b40f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.904260] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Successfully created port: 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.033869] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.088025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143e20a5-53b6-4714-8b4e-4b74c0726ab1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.095564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed8e9fd-04b5-4ddb-b353-36f872f0352b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.134831] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947818d8-f03c-43aa-b877-836ef3256285 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.141101] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944249, 'name': ReconfigVM_Task, 'duration_secs': 0.251184} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.143526] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 887.144387] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3489feb-6e48-4845-8e52-56d9ec8fca4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.147917] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a72c68b-70f8-49ff-ac1a-bccbede6179e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.152582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d2467897-465d-45dd-b58e-eed67c162ee4 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.262s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.163022] env[68437]: DEBUG nova.compute.provider_tree [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.183531] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.185716] env[68437]: DEBUG nova.scheduler.client.report [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.190528] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04057dde-e5b3-4f6d-85dc-070fb05c2992 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.208154] env[68437]: DEBUG nova.compute.manager 
[req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Received event network-vif-plugged-dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 887.208387] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] Acquiring lock "de54bc8d-2626-41fc-970a-865a842a932e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.208596] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] Lock "de54bc8d-2626-41fc-970a-865a842a932e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.208920] env[68437]: DEBUG oslo_concurrency.lockutils [req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] Lock "de54bc8d-2626-41fc-970a-865a842a932e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.209139] env[68437]: DEBUG nova.compute.manager [req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] No waiting events found dispatching network-vif-plugged-dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 887.209301] env[68437]: WARNING nova.compute.manager [req-0e1ad828-b225-42d5-987d-88c8124de06e req-bfbf3ccf-7714-4dc8-9fc8-615382722f05 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Received unexpected event network-vif-plugged-dff04cc6-cf18-4191-b01e-acba3b9b03b3 for instance with vm_state building and task_state spawning. 
[ 887.210250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.187s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.216464] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.667s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.216706] env[68437]: DEBUG nova.objects.instance [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lazy-loading 'resources' on Instance uuid acbf4c5c-341c-4ebd-ad29-90ebf531aa86 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.228796] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 887.228796] env[68437]: value = "task-2944252" [ 887.228796] env[68437]: _type = "Task" [ 887.228796] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.237755] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096814} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.241670] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.242370] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944251, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.243430] env[68437]: INFO nova.scheduler.client.report [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Deleted allocations for instance 07d98c5c-ede8-4001-93b2-1b1d83687ca1 [ 887.245178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d67356e-c146-4d6d-87b5-8116ee3cf7f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.260027] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.277043] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.279834] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fe332d5-0707-4db0-a244-884b76480f2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.298132] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Successfully updated port: dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 887.303596] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 887.303596] env[68437]: value = "task-2944253" [ 887.303596] env[68437]: _type = "Task" [ 887.303596] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.313869] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.727085] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.804745} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.727425] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] d84c599e-29b2-45ec-a3f7-54ef85af9a3d/d84c599e-29b2-45ec-a3f7-54ef85af9a3d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.728030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.728030] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af433b8d-7ad6-466a-ac28-5a9954fffec7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.742022] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.743315] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 887.743315] env[68437]: value = "task-2944254" [ 887.743315] env[68437]: _type = "Task" [ 887.743315] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.755417] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944254, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.762474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b18726d-c490-444b-b084-a32758caa78a tempest-AttachInterfacesUnderV243Test-1063994586 tempest-AttachInterfacesUnderV243Test-1063994586-project-member] Lock "07d98c5c-ede8-4001-93b2-1b1d83687ca1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.150s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.798499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.798710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquired lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.798873] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 887.800934] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.801747] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a70529e0-1f2d-4294-b578-19559de3ee12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.818321] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 887.818321] env[68437]: value = "task-2944255" [ 887.818321] env[68437]: _type = "Task" [ 887.818321] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.829945] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944253, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.837018] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944255, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.045203] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.073142] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.073425] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.073592] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.073775] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.073922] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.074199] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.074541] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.074788] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.075191] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.075458] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.075711] env[68437]: DEBUG nova.virt.hardware [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.076722] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca09a27-bfa0-46b0-a647-496bbd3289ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.088529] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2df0301-6311-47a3-8e0d-03e3aa561d80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.241341] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944252, 'name': ReconfigVM_Task, 'duration_secs': 0.785619} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.243765] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.244154] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 888.258712] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091631} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.258712] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.258712] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b96761-0de0-47d5-8f9a-ca65260cc965 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.279865] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] d84c599e-29b2-45ec-a3f7-54ef85af9a3d/d84c599e-29b2-45ec-a3f7-54ef85af9a3d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.280807] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb4e0c48-76d3-4caa-962e-072f2f78f03b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.299639] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f664e4-a620-4efe-8c2d-6eda3c05fba8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.308596] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 888.308596] env[68437]: value = "task-2944256" [ 888.308596] env[68437]: _type = "Task" [ 888.308596] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.321693] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003eada3-f28f-4a28-aafc-af58227ffe37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.332930] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944253, 'name': ReconfigVM_Task, 'duration_secs': 0.708586} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.361316] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 892bf198-7d05-4995-8137-c57095c5c839/892bf198-7d05-4995-8137-c57095c5c839.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.361871] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944256, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.362706] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 888.367435] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4320f9aa-348f-4982-93eb-a7c2ca296d44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.369574] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64da6efb-396d-4f75-a78d-de510c7f78c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.373849] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944255, 'name': PowerOffVM_Task, 'duration_secs': 0.311184} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.374477] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.374987] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.376053] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd058086-4ca1-491c-9a03-35c5468a5984 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.383018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c92a6fd-e018-45f4-8131-d1e10de68f27 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.387173] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 888.387173] env[68437]: value = "task-2944257" [ 888.387173] env[68437]: _type = "Task" [ 888.387173] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.389384] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.392755] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c193399-a4ea-4cdc-a92e-d01b7e91edf1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.403020] env[68437]: DEBUG nova.compute.provider_tree [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.412536] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944257, 'name': Rename_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.509472] env[68437]: DEBUG nova.compute.manager [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-vif-plugged-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 888.509696] env[68437]: DEBUG oslo_concurrency.lockutils [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] Acquiring lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.509904] env[68437]: DEBUG oslo_concurrency.lockutils [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.510359] env[68437]: DEBUG oslo_concurrency.lockutils [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.510573] env[68437]: DEBUG nova.compute.manager [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] No waiting events found dispatching network-vif-plugged-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 888.510761] env[68437]: WARNING nova.compute.manager [req-76828c6d-e3cb-44c4-930d-c7f8778f3830 req-ad0a85c6-b6a2-4514-99a1-8b9d4307cc44 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received unexpected event network-vif-plugged-9b76d917-ecbb-45a2-8959-400914c3c584 for instance with vm_state building and task_state spawning. 
[ 888.599812] env[68437]: DEBUG nova.network.neutron [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Updating instance_info_cache with network_info: [{"id": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "address": "fa:16:3e:26:ed:8a", "network": {"id": "8b66e6b8-41c9-49eb-bad4-6afbcd8a1a19", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1568141300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d621d13761a0416485c24e97967e1a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff04cc6-cf", "ovs_interfaceid": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.667254] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Successfully updated port: 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.753977] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f27ea5b-1e88-4e6f-a292-1f8d50a0e17a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.773900] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3389576d-1be5-45d0-8f24-760e6daf44bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.791645] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 888.827342] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944256, 'name': ReconfigVM_Task, 'duration_secs': 0.349713} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.827662] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Reconfigured VM instance instance-00000038 to attach disk [datastore1] d84c599e-29b2-45ec-a3f7-54ef85af9a3d/d84c599e-29b2-45ec-a3f7-54ef85af9a3d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.828426] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7981170a-8876-4625-9f99-57ba2807da33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.835016] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 888.835016] env[68437]: value = "task-2944259" [ 888.835016] env[68437]: _type = "Task" [ 888.835016] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.845839] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944259, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.899297] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944257, 'name': Rename_Task, 'duration_secs': 0.170327} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.899624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.899875] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46b40e01-25fa-4dbf-8933-5f48b4b9d3bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.906132] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 888.906132] env[68437]: value = "task-2944260" [ 888.906132] env[68437]: _type = "Task" [ 888.906132] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.910201] env[68437]: DEBUG nova.scheduler.client.report [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.918574] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.920197] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.920197] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.920197] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore1] 2f368262-0825-4ccc-9b1e-523b705bcfce {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.922645] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e769e59b-ab19-41e2-8f66-d71d011e4f2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.929259] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 888.929259] env[68437]: value = "task-2944261" [ 888.929259] env[68437]: _type = "Task" [ 888.929259] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.936213] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944261, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.032747] env[68437]: DEBUG nova.compute.manager [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 889.106029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Releasing lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.106029] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Instance network_info: |[{"id": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "address": "fa:16:3e:26:ed:8a", "network": {"id": "8b66e6b8-41c9-49eb-bad4-6afbcd8a1a19", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1568141300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d621d13761a0416485c24e97967e1a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff04cc6-cf", "ovs_interfaceid": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 889.106029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:ed:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4df917f7-847a-4c0e-b0e3-69a52e4a1554', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dff04cc6-cf18-4191-b01e-acba3b9b03b3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 889.113019] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Creating folder: Project (d621d13761a0416485c24e97967e1a08). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 889.113019] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d67353f-d634-49ff-969e-e2e23d9938fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.123776] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Created folder: Project (d621d13761a0416485c24e97967e1a08) in parent group-v590848. [ 889.124082] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Creating folder: Instances. Parent ref: group-v590996. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 889.124320] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04ef23ed-3594-4c01-93cd-cf137ec4eb9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.135830] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Created folder: Instances in parent group-v590996. [ 889.136141] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 889.136380] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 889.136623] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d3b5ffe-f191-4145-b76e-06c3650368c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.157189] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.157189] env[68437]: value = "task-2944264" [ 889.157189] env[68437]: _type = "Task" [ 889.157189] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.164761] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944264, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.168602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.168749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.168920] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 889.320065] env[68437]: DEBUG nova.compute.manager [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Received event network-changed-dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.320306] env[68437]: DEBUG nova.compute.manager [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Refreshing instance network info cache due to event network-changed-dff04cc6-cf18-4191-b01e-acba3b9b03b3. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 889.320536] env[68437]: DEBUG oslo_concurrency.lockutils [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] Acquiring lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.320681] env[68437]: DEBUG oslo_concurrency.lockutils [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] Acquired lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.320841] env[68437]: DEBUG nova.network.neutron [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Refreshing network info cache for port dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 889.339500] env[68437]: DEBUG nova.network.neutron [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Port 6c053c01-e575-4bdc-93ce-3604fa26d1ee binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 889.347155] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944259, 'name': Rename_Task, 'duration_secs': 0.220401} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.347422] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.347670] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b543ea08-ebca-460a-a1ab-0dbcfdc38581 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.355067] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 889.355067] env[68437]: value = "task-2944265" [ 889.355067] env[68437]: _type = "Task" [ 889.355067] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.363898] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944265, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.419357] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.203s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.422110] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944260, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.422569] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.322s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.424892] env[68437]: INFO nova.compute.claims [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.437227] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337364} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.438750] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.438750] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.438750] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.443782] env[68437]: INFO nova.scheduler.client.report [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleted allocations for instance acbf4c5c-341c-4ebd-ad29-90ebf531aa86 [ 889.549157] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.669187] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944264, 'name': CreateVM_Task, 'duration_secs': 0.338416} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.669187] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.669479] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.669592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.669909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 889.670175] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06e8ad84-ec0f-4486-b1ff-c81721476770 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.676500] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 889.676500] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527203b8-dbcb-0725-5986-97278dec08c5" [ 889.676500] env[68437]: _type = "Task" [ 889.676500] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.684715] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527203b8-dbcb-0725-5986-97278dec08c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.705917] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 889.820310] env[68437]: DEBUG nova.network.neutron [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.867182] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944265, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.919027] env[68437]: DEBUG oslo_vmware.api [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944260, 'name': PowerOnVM_Task, 'duration_secs': 0.632275} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.919332] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.919545] env[68437]: DEBUG nova.compute.manager [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.920338] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f7af7d-8f8f-4d74-a160-c16a860e1bff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.957361] env[68437]: DEBUG oslo_concurrency.lockutils [None req-51d74904-4793-4fbb-a4c1-4c27d8ce0c0b tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "acbf4c5c-341c-4ebd-ad29-90ebf531aa86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.805s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.034169] env[68437]: DEBUG nova.network.neutron [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Updated VIF entry in instance network info cache for port dff04cc6-cf18-4191-b01e-acba3b9b03b3. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 890.034532] env[68437]: DEBUG nova.network.neutron [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Updating instance_info_cache with network_info: [{"id": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "address": "fa:16:3e:26:ed:8a", "network": {"id": "8b66e6b8-41c9-49eb-bad4-6afbcd8a1a19", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1568141300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d621d13761a0416485c24e97967e1a08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4df917f7-847a-4c0e-b0e3-69a52e4a1554", "external-id": "cl2-zone-457", "segmentation_id": 457, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdff04cc6-cf", "ovs_interfaceid": "dff04cc6-cf18-4191-b01e-acba3b9b03b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.189929] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527203b8-dbcb-0725-5986-97278dec08c5, 'name': SearchDatastore_Task, 'duration_secs': 0.038046} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.190091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.190356] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.190623] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.190773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.190955] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.191233] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5f133b3-9c61-4ba1-9dcf-91f07fcfd84d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.199987] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.200189] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.200909] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00900ae5-4b49-40c8-a737-b8c203c850c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.206057] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 890.206057] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527d2022-be60-6d50-b340-d2d393d530e9" [ 890.206057] env[68437]: _type = "Task" [ 890.206057] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.213842] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527d2022-be60-6d50-b340-d2d393d530e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.323384] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.324966] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Instance network_info: |[{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.325515] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:26:14:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4020f51-6e46-4b73-a79e-9fe3fd51b917', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b76d917-ecbb-45a2-8959-400914c3c584', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.333119] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Creating folder: Project (e774b300870a495ca1cc652d8920d32a). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.333439] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1217e57b-efb9-45f1-b895-bb52aee02f9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.344748] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Created folder: Project (e774b300870a495ca1cc652d8920d32a) in parent group-v590848. [ 890.344954] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Creating folder: Instances. Parent ref: group-v590999. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.345219] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c7b6a73-2a0a-4962-baa5-7307a9cb63c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.363484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.363716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.363889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.367949] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Created folder: Instances in 
parent group-v590999. [ 890.368481] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.369551] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.369768] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f5a22db-3223-4bf2-ac3d-1f4428e53001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.400142] env[68437]: DEBUG oslo_vmware.api [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944265, 'name': PowerOnVM_Task, 'duration_secs': 0.989576} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.400976] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.401297] env[68437]: INFO nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Took 10.28 seconds to spawn the instance on the hypervisor. [ 890.401521] env[68437]: DEBUG nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.402966] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8871423a-800b-4219-8214-8d851aedc8ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.408742] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.408742] env[68437]: value = "task-2944268" [ 890.408742] env[68437]: _type = "Task" [ 890.408742] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.427023] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944268, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.444023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.491274] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.491645] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.491976] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.492302] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.492525] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.492743] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.493147] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 890.493402] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.493673] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.493916] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.494266] env[68437]: DEBUG nova.virt.hardware [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.495927] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eb390d-ffa4-460b-b1af-bc5621f8e5e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.507338] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c54812-d1f4-4012-8da3-47d3ee8f1208 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.523678] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:4e:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16c6ea68-9b0e-4ac0-a484-7a9a40533017', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a915dbf0-9e3f-41da-b43b-dd0a4225b839', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.531050] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.533624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.534046] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19eb3a32-3a5a-404e-bf64-07368e1650c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.551309] env[68437]: DEBUG oslo_concurrency.lockutils [req-fc6af4fa-0fa5-488b-b65d-968a816be7e4 req-2547cc49-c6dd-45fa-bede-243929dc2593 service nova] Releasing lock "refresh_cache-de54bc8d-2626-41fc-970a-865a842a932e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.557249] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.557249] env[68437]: value = "task-2944269" [ 890.557249] env[68437]: _type = "Task" [ 890.557249] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.564733] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944269, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.719619] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527d2022-be60-6d50-b340-d2d393d530e9, 'name': SearchDatastore_Task, 'duration_secs': 0.01413} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.723498] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe665205-e2f4-49bf-8651-bb41a7edd88a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.729554] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 890.729554] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52450777-5d1f-592a-c646-c4dde887fe10" [ 890.729554] env[68437]: _type = "Task" [ 890.729554] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.738880] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52450777-5d1f-592a-c646-c4dde887fe10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.847471] env[68437]: DEBUG nova.compute.manager [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 890.847730] env[68437]: DEBUG nova.compute.manager [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing instance network info cache due to event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 890.848586] env[68437]: DEBUG oslo_concurrency.lockutils [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.848586] env[68437]: DEBUG oslo_concurrency.lockutils [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.848586] env[68437]: DEBUG nova.network.neutron [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 890.940858] env[68437]: INFO nova.compute.manager [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Took 49.45 seconds to build instance. [ 890.950717] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944268, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.041515] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9337046-f37b-4c30-a710-4d409e410d68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.049237] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870ab9b0-d664-4fa3-acd6-b7d6de8b7753 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.089356] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa127ccf-c461-474c-8296-ea206e13f093 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.101018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3406e1-c903-4c13-807c-279e9e26c307 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.104999] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944269, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.115467] env[68437]: DEBUG nova.compute.provider_tree [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.242312] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52450777-5d1f-592a-c646-c4dde887fe10, 'name': SearchDatastore_Task, 'duration_secs': 0.013243} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.243330] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.243330] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] de54bc8d-2626-41fc-970a-865a842a932e/de54bc8d-2626-41fc-970a-865a842a932e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 891.243330] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fea068f-c2da-441a-999c-16298010ef11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.251557] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 891.251557] env[68437]: value = "task-2944270" [ 891.251557] env[68437]: _type = "Task" [ 891.251557] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.259450] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944270, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.423017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "892bf198-7d05-4995-8137-c57095c5c839" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.423017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.423171] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "892bf198-7d05-4995-8137-c57095c5c839-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.423290] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.423496] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.425884] env[68437]: INFO nova.compute.manager [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Terminating instance [ 891.428862] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "ad773afa-fc0a-4380-901d-af013ce55f2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.428862] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.428862] env[68437]: 
DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.429100] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.429313] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.433614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.433614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.433614] env[68437]: DEBUG nova.network.neutron [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 891.436637] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944268, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.436637] env[68437]: INFO nova.compute.manager [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Terminating instance [ 891.453606] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4a7e432-4f97-44d9-8db1-2df6f060ff6e tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.436s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.594847] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944269, 'name': CreateVM_Task, 'duration_secs': 0.583652} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.595039] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.595742] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.596667] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.596667] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 891.596667] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-789d06b6-bfb2-4e0c-9450-83827239f065 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.601962] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 891.601962] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5251bde3-1c1d-703d-7472-24650ce908ee" [ 891.601962] env[68437]: _type = "Task" [ 891.601962] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.610468] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5251bde3-1c1d-703d-7472-24650ce908ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.619879] env[68437]: DEBUG nova.network.neutron [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updated VIF entry in instance network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 891.620093] env[68437]: DEBUG nova.network.neutron [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.622119] env[68437]: DEBUG nova.scheduler.client.report [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.762179] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944270, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479515} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.762480] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] de54bc8d-2626-41fc-970a-865a842a932e/de54bc8d-2626-41fc-970a-865a842a932e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.762700] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.762952] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cafb7ccb-a764-4d95-b903-7b5c30c94f92 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.769204] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 891.769204] env[68437]: value = "task-2944271" [ 891.769204] env[68437]: _type = "Task" [ 891.769204] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.777896] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944271, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.927278] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944268, 'name': CreateVM_Task, 'duration_secs': 1.344329} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.927886] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.929798] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.933073] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "refresh_cache-892bf198-7d05-4995-8137-c57095c5c839" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.933073] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquired lock "refresh_cache-892bf198-7d05-4995-8137-c57095c5c839" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.933073] env[68437]: DEBUG nova.network.neutron [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 891.945026] env[68437]: DEBUG nova.compute.manager [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.945026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.948323] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214de8eb-ab2d-4988-806e-4b2aa7940da8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.953924] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.954203] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-005dd994-956c-4164-bfcf-f7bee01efac8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.965816] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 891.965816] env[68437]: value = "task-2944272" [ 891.965816] env[68437]: _type = "Task" [ 891.965816] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.979178] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.114008] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5251bde3-1c1d-703d-7472-24650ce908ee, 'name': SearchDatastore_Task, 'duration_secs': 0.058362} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.114288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.114527] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.114765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.114910] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.115105] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.115390] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.115697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.116739] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-369f9a5b-a69c-493f-b86e-248d0f08fe81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.119280] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b180d2f-ef85-4124-acd9-d15626735763 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.125764] env[68437]: DEBUG oslo_vmware.api 
[None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 892.125764] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521a5cdd-fe6f-1622-aebd-21426dec27b7" [ 892.125764] env[68437]: _type = "Task" [ 892.125764] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.126413] env[68437]: DEBUG oslo_concurrency.lockutils [req-f7dfb9c2-fe6c-46ef-94db-50d6dc2fad80 req-94a86718-a113-416b-8f61-5218dd79e479 service nova] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.127889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.128404] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 892.136197] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.808s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.136430] env[68437]: DEBUG nova.objects.instance [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lazy-loading 'resources' on Instance uuid 26985e45-21ff-40bb-ac2b-c6f3700ccc97 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.137676] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.137880] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.138885] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4a8f2e-1824-46fa-a165-9df0a9735384 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.146599] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521a5cdd-fe6f-1622-aebd-21426dec27b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.151075] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 892.151075] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52182ab2-c0a2-7221-4b12-73fe6282d353" [ 892.151075] env[68437]: _type = "Task" [ 892.151075] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.162576] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52182ab2-c0a2-7221-4b12-73fe6282d353, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.254029] env[68437]: DEBUG nova.network.neutron [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.281893] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944271, 'name': ExtendVirtualDisk_Task, 
'duration_secs': 0.080922} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.282455] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.282964] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06dff2-ddc9-4d35-8ae2-162a60c04632 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.310259] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] de54bc8d-2626-41fc-970a-865a842a932e/de54bc8d-2626-41fc-970a-865a842a932e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.310954] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aabca94-c837-4173-8d1d-ae2934c1ec6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.332441] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 892.332441] env[68437]: value = "task-2944273" [ 892.332441] env[68437]: _type = "Task" [ 892.332441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.340731] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.454871] env[68437]: DEBUG nova.network.neutron [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 892.482068] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944272, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.576265] env[68437]: DEBUG nova.network.neutron [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.635451] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521a5cdd-fe6f-1622-aebd-21426dec27b7, 'name': SearchDatastore_Task, 'duration_secs': 0.025039} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.635790] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.636042] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.636262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.643017] env[68437]: DEBUG nova.compute.utils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 892.644589] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 892.644760] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 892.662534] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52182ab2-c0a2-7221-4b12-73fe6282d353, 'name': SearchDatastore_Task, 'duration_secs': 0.014157} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.664789] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-629f6e2b-e6a2-49fd-b180-0baf0ba3b475 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.670802] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 892.670802] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521df58c-65f8-5703-543c-5ffd5e8f59ce" [ 892.670802] env[68437]: _type = "Task" [ 892.670802] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.681563] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521df58c-65f8-5703-543c-5ffd5e8f59ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.701137] env[68437]: DEBUG nova.policy [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2dda8b15280c4d2282d4dc88aa3d607d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63985eb5e5fb47958fd673bd0ce73f2d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 892.757707] env[68437]: DEBUG oslo_concurrency.lockutils [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.848749] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944273, 'name': ReconfigVM_Task, 'duration_secs': 0.313777} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.848749] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Reconfigured VM instance instance-00000039 to attach disk [datastore2] de54bc8d-2626-41fc-970a-865a842a932e/de54bc8d-2626-41fc-970a-865a842a932e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.848749] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cd9e001-de9c-44ee-874e-848aabb37226 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.854263] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 892.854263] env[68437]: value = "task-2944274" [ 892.854263] env[68437]: _type = "Task" [ 892.854263] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.864288] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944274, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.980779] env[68437]: DEBUG nova.compute.manager [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 892.981172] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944272, 'name': PowerOffVM_Task, 'duration_secs': 0.617768} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.985031] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627f5d94-5897-48e1-b3db-6a6f92be5fc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.987944] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.988155] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 892.988723] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04d0931f-8613-4d64-a97e-64b25f74d9a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.065056] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.065056] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.065659] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleting the datastore file [datastore1] ad773afa-fc0a-4380-901d-af013ce55f2b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.065963] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8b4d6e5-7aca-4bdf-b1af-c1211383153e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.073961] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for the task: (returnval){ [ 893.073961] env[68437]: value = "task-2944276" [ 893.073961] env[68437]: _type = "Task" [ 893.073961] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.084018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Releasing lock "refresh_cache-892bf198-7d05-4995-8137-c57095c5c839" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.085042] env[68437]: DEBUG nova.compute.manager [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 893.085284] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.085612] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944276, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.091607] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6c3a9a-01f4-4d06-b5b1-0fc15f0e1f9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.099785] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.100078] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-818dbf3e-6d76-41ab-bbd6-34cdf86bd10c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.110266] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 893.110266] env[68437]: value = "task-2944277" [ 893.110266] env[68437]: _type = "Task" [ 893.110266] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.121227] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.147798] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 893.186121] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521df58c-65f8-5703-543c-5ffd5e8f59ce, 'name': SearchDatastore_Task, 'duration_secs': 0.034356} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.188172] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Successfully created port: 120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.192246] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.192246] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.199027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.199027] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.199027] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8c8c3d5-660b-4fdc-a5e5-53d656dc00df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.200352] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ed93442-7d51-4619-97fd-41f7b1296a78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.209546] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 893.209546] env[68437]: value = "task-2944278" [ 893.209546] 
env[68437]: _type = "Task" [ 893.209546] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.219246] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.219722] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.220705] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b933355c-5c25-40a2-bcd7-3bd30c6d2585 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.235023] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.235023] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 893.235023] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5212bf44-96b0-dc8e-7ba5-60fc32928c7b" [ 893.235023] env[68437]: _type = "Task" [ 893.235023] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.244911] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5212bf44-96b0-dc8e-7ba5-60fc32928c7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.294320] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1299b19-bf2f-4238-aa09-4a60c5f8d7ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.321726] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee259ae6-41ff-414a-85aa-327fc181edc5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.330288] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 893.372027] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944274, 'name': Rename_Task, 'duration_secs': 0.205526} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.372027] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.372027] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97a4ee88-1a35-4eed-a80c-598f48b0dadf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.376862] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 893.376862] env[68437]: value = "task-2944279" [ 893.376862] env[68437]: _type = "Task" [ 893.376862] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.393782] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944279, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.499303] env[68437]: INFO nova.compute.manager [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] instance snapshotting [ 893.502286] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b2875b-2a5a-41dc-9142-b8d73d7185fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.523340] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c51685-8ba2-46cc-98a6-248fbbd8dca2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.526531] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cc58e7-22ca-4d3e-8516-c8dce788202b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.536878] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5574d8a-0eea-4bf9-8e62-2c7b7a328112 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.573043] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b2a796-cabc-4478-921d-41f5efcd5345 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.588032] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0705919-3a0b-46b4-9caa-b6eea4ccd864 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.591737] env[68437]: DEBUG oslo_vmware.api [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Task: {'id': task-2944276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.473561} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.592775] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.593436] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 893.593436] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 893.593436] env[68437]: INFO nova.compute.manager [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Took 1.65 seconds to destroy the instance on the hypervisor. [ 893.593840] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 893.594286] env[68437]: DEBUG nova.compute.manager [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.594433] env[68437]: DEBUG nova.network.neutron [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 893.610900] env[68437]: DEBUG nova.compute.provider_tree [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.627035] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944277, 'name': PowerOffVM_Task, 'duration_secs': 0.288585} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.627035] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.627035] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.627035] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13129fdd-2741-4935-8386-f7431142e5fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.654479] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.654861] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.655192] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Deleting the datastore file [datastore1] 892bf198-7d05-4995-8137-c57095c5c839 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.661300] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4000b13-37d3-4b72-b09f-a003d3ffd670 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.673416] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for the task: (returnval){ [ 893.673416] env[68437]: value = "task-2944281" [ 893.673416] env[68437]: _type = "Task" [ 893.673416] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.686091] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944281, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.725250] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944278, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.748515] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5212bf44-96b0-dc8e-7ba5-60fc32928c7b, 'name': SearchDatastore_Task, 'duration_secs': 0.010074} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.749857] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-509bb5cb-ef47-4787-8063-a155473b2b24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.758503] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 893.758503] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5287a338-22b1-89ae-7f47-ad4c32103606" [ 893.758503] env[68437]: _type = "Task" [ 893.758503] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.773595] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5287a338-22b1-89ae-7f47-ad4c32103606, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.839110] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.839926] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f72c4b4c-d1ae-4f91-ac01-d23308de6dd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.849733] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 893.849733] env[68437]: value = "task-2944282" [ 893.849733] env[68437]: _type = "Task" [ 893.849733] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.874289] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944282, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.894862] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944279, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.048284] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 894.048892] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3b98fc80-3acf-4aac-8f49-45c9bd5b2790 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.056067] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 894.056067] env[68437]: value = "task-2944283" [ 894.056067] env[68437]: _type = "Task" [ 894.056067] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.064788] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944283, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.113048] env[68437]: DEBUG nova.scheduler.client.report [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.168197] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 894.184547] env[68437]: DEBUG oslo_vmware.api [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Task: {'id': task-2944281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287454} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.185093] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.185296] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.185466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.185694] env[68437]: INFO nova.compute.manager [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Took 1.10 seconds to destroy the instance on the hypervisor. [ 894.185993] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.186296] env[68437]: DEBUG nova.compute.manager [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.186411] env[68437]: DEBUG nova.network.neutron [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 894.211058] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 
tempest-VolumesAdminNegativeTest-411280535-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 894.214020] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 894.218044] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 894.218044] env[68437]: DEBUG nova.virt.hardware [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 894.218044] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76f930e-3351-44f4-afcc-e7fcb809b912 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.224149] env[68437]: DEBUG nova.network.neutron 
[-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 894.247128] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729806} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.247887] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.248954] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.251474] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66349f53-5ae7-43a1-9331-0e5e3881bfcb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.259850] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-278e4d46-dcd0-42b6-8ba0-5b5cc1291999 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.298795] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 894.298795] env[68437]: value = "task-2944284" [ 894.298795] env[68437]: _type = "Task" [ 894.298795] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.310223] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5287a338-22b1-89ae-7f47-ad4c32103606, 'name': SearchDatastore_Task, 'duration_secs': 0.056137} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.311151] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.311554] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/c9d26fd4-f780-4986-8a5f-dea041a70f5d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.312293] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae74efa7-456a-4b83-a3af-66776838a6f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.320333] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.325720] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 894.325720] env[68437]: value = "task-2944285" [ 894.325720] env[68437]: _type = "Task" [ 894.325720] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.338608] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.361354] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944282, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.388675] env[68437]: DEBUG oslo_vmware.api [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944279, 'name': PowerOnVM_Task, 'duration_secs': 0.963881} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.388934] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.389177] env[68437]: INFO nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Took 7.96 seconds to spawn the instance on the hypervisor. [ 894.389374] env[68437]: DEBUG nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.390271] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c647d2-625f-4093-9ace-d5ad73e956cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.479688] env[68437]: DEBUG nova.compute.manager [req-65661487-295e-4915-a016-fa8f0366b173 req-1c80b226-8f5d-4b71-8c09-d4c7ac93cc2a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Received event network-vif-deleted-b1d3a732-f87f-4b26-a261-f7dccc5912ac {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 894.479936] env[68437]: INFO nova.compute.manager [req-65661487-295e-4915-a016-fa8f0366b173 req-1c80b226-8f5d-4b71-8c09-d4c7ac93cc2a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Neutron deleted interface b1d3a732-f87f-4b26-a261-f7dccc5912ac; detaching it from the instance and deleting it from the info cache [ 894.483577] env[68437]: DEBUG nova.network.neutron [req-65661487-295e-4915-a016-fa8f0366b173 req-1c80b226-8f5d-4b71-8c09-d4c7ac93cc2a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.572020] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944283, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.618374] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.482s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.621940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.804s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.621940] env[68437]: DEBUG nova.objects.instance [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lazy-loading 'resources' on Instance uuid f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.676520] env[68437]: INFO nova.scheduler.client.report [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleted allocations for instance 26985e45-21ff-40bb-ac2b-c6f3700ccc97 [ 894.738569] env[68437]: DEBUG nova.network.neutron [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.796442] env[68437]: DEBUG nova.network.neutron [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.816551] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164684} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.817269] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.821753] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfcb137-f030-481c-b850-13bc692ae067 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.876872] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.876872] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944285, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.881224] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27defdf3-cbbc-4f88-8157-8feff8fa4ca4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.906656] env[68437]: DEBUG oslo_vmware.api [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944282, 'name': PowerOnVM_Task, 'duration_secs': 0.824375} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.908766] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.909097] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-53ba78a5-559f-4245-9501-7b3e1cc257db tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance 'b7706bf2-936f-439c-8e9f-b2241d0c211c' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 894.913266] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 894.913266] env[68437]: value = "task-2944286" [ 894.913266] env[68437]: _type = "Task" [ 894.913266] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.919113] env[68437]: INFO nova.compute.manager [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Took 49.16 seconds to build instance. [ 894.928691] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.957834] env[68437]: DEBUG nova.compute.manager [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Received event network-vif-plugged-120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 894.958301] env[68437]: DEBUG oslo_concurrency.lockutils [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] Acquiring lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.958626] env[68437]: DEBUG oslo_concurrency.lockutils [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.958972] env[68437]: DEBUG oslo_concurrency.lockutils [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.959065] env[68437]: DEBUG nova.compute.manager [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] No waiting events found dispatching network-vif-plugged-120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.959300] env[68437]: WARNING nova.compute.manager [req-17e87804-5d5f-45a3-b14a-fe2c001a449b req-14d4d125-3fae-4312-b1db-c0512804edc0 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Received unexpected event network-vif-plugged-120267fe-8525-41c2-868c-f8e2af61464c for instance with vm_state building and task_state spawning. 
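
The lock lifecycle records above ('Acquiring lock "..." by "..."', 'acquired ... :: waited N.NNNs', '"released" ... :: held N.NNNs') are emitted by oslo.concurrency's lockutils helpers, which Nova uses to serialize work such as resource-tracker updates, instance-event handling and network-cache refreshes. A minimal sketch of that usage pattern follows; it is not Nova's actual code, and the lock names, function bodies and UUIDs are placeholders.

from oslo_concurrency import lockutils

# Decorator form: serializes callers on a named in-process semaphore and logs
# the 'Acquiring lock ... by ...', 'acquired :: waited Ns' and
# '"released" :: held Ns' lines seen throughout this log.
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Placeholder body; the real resource tracker mutates usage records here.
    print('updating usage for %s' % instance_uuid)

# Context-manager form: used for ad-hoc named locks such as the
# "refresh_cache-<instance-uuid>" and image-cache locks in this section.
def refresh_network_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache while holding the lock

update_usage('00000000-0000-0000-0000-000000000000')
refresh_network_cache('00000000-0000-0000-0000-000000000000')
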
[ 894.986571] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06f7f8ef-d358-43da-954e-7d639f8f0881 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.998093] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bc446a-f240-4d07-9f4f-ba80491e50be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.038731] env[68437]: DEBUG nova.compute.manager [req-65661487-295e-4915-a016-fa8f0366b173 req-1c80b226-8f5d-4b71-8c09-d4c7ac93cc2a service nova] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Detach interface failed, port_id=b1d3a732-f87f-4b26-a261-f7dccc5912ac, reason: Instance ad773afa-fc0a-4380-901d-af013ce55f2b could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 895.063816] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Successfully updated port: 120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.072264] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944283, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.186822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17d9009d-f4d0-4d1f-bd58-b725d3935267 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.472s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.187934] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 34.627s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.188195] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.188441] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.188628] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.191908] env[68437]: INFO nova.compute.manager [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Terminating instance [ 895.241230] env[68437]: INFO nova.compute.manager [-] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Took 1.05 seconds to deallocate network for instance. [ 895.302339] env[68437]: INFO nova.compute.manager [-] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Took 1.71 seconds to deallocate network for instance. [ 895.339063] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77729} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.339063] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/c9d26fd4-f780-4986-8a5f-dea041a70f5d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.339244] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.339369] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8aeb9b3e-d9b5-4782-a417-530ac7637a75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.347093] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 895.347093] env[68437]: value = "task-2944287" [ 895.347093] env[68437]: _type = "Task" [ 895.347093] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.359014] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944287, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.426369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8a65d52c-ac62-48ae-b18d-7b15df1271f5 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.667s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.433838] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.569283] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.569381] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.569565] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 895.570834] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944283, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.652744] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4eacc4b-4388-4cb8-bfc2-a86f2ed1f63d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.661548] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3867fa9d-4413-4f99-9bb7-66c833e309eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.697756] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.698186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquired lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.698504] env[68437]: DEBUG nova.network.neutron [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 895.700569] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e20886-af98-4c30-b108-14ed64bfd9d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.710584] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3df9ef-276a-4b5f-934f-5b920c46a6db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.729411] env[68437]: DEBUG nova.compute.provider_tree [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.750666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.811152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.857539] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196967} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.857916] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.859343] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e503062-ceaf-4fdf-9744-1bd3134a3674 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.883339] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/c9d26fd4-f780-4986-8a5f-dea041a70f5d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.883688] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7092296d-ec95-47fb-a265-02ac631e0d03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.905799] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 895.905799] env[68437]: value = "task-2944288" [ 895.905799] env[68437]: _type = "Task" [ 895.905799] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.913877] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944288, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.925058] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944286, 'name': ReconfigVM_Task, 'duration_secs': 0.799068} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.925356] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 2f368262-0825-4ccc-9b1e-523b705bcfce/2f368262-0825-4ccc-9b1e-523b705bcfce.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.926028] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0653a1b4-6c07-4446-8a3f-1d67e76faf8e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.939740] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 895.939740] env[68437]: value = "task-2944289" [ 895.939740] env[68437]: _type = "Task" [ 895.939740] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.952857] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944289, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.069787] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944283, 'name': CreateSnapshot_Task, 'duration_secs': 1.926249} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.070173] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.070870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ded3e3-268e-427a-9aeb-1b13b8e4c894 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.120810] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 896.206221] env[68437]: DEBUG nova.compute.utils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Can not refresh info_cache because instance was not found {{(pid=68437) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 896.222736] env[68437]: DEBUG nova.network.neutron [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 896.233047] env[68437]: DEBUG nova.scheduler.client.report [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.276352] env[68437]: DEBUG nova.network.neutron [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Updating instance_info_cache with network_info: [{"id": "120267fe-8525-41c2-868c-f8e2af61464c", "address": "fa:16:3e:c1:0a:8c", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120267fe-85", "ovs_interfaceid": "120267fe-8525-41c2-868c-f8e2af61464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.333765] env[68437]: DEBUG nova.network.neutron [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [] 
{{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.419503] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.460460] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944289, 'name': Rename_Task, 'duration_secs': 0.187387} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.460735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.460986] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac35dbae-ec12-4730-959e-aac3495eae6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.467200] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 896.467200] env[68437]: value = "task-2944290" [ 896.467200] env[68437]: _type = "Task" [ 896.467200] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.480890] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.597026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 896.597363] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6aa59117-7236-42a3-9cbc-c076c67fb49b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.604993] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 896.604993] env[68437]: value = "task-2944291" [ 896.604993] env[68437]: _type = "Task" [ 896.604993] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.613498] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944291, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.739938] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.743076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.300s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.744282] env[68437]: INFO nova.compute.claims [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.763033] env[68437]: INFO nova.scheduler.client.report [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted allocations for instance f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81 [ 896.780286] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.780694] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Instance network_info: |[{"id": "120267fe-8525-41c2-868c-f8e2af61464c", "address": "fa:16:3e:c1:0a:8c", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap120267fe-85", "ovs_interfaceid": "120267fe-8525-41c2-868c-f8e2af61464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.781236] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:0a:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0954fad3-d24d-496c-83e6-a09d3cb556fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '120267fe-8525-41c2-868c-f8e2af61464c', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.791111] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.791111] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.791310] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee0635ec-8dfb-4ffa-ae83-ad1e98f89a53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.813751] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.813751] env[68437]: value = "task-2944292" [ 896.813751] env[68437]: _type = "Task" [ 896.813751] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.825731] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944292, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.836369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Releasing lock "refresh_cache-26985e45-21ff-40bb-ac2b-c6f3700ccc97" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.836861] env[68437]: DEBUG nova.compute.manager [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 896.837047] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.837341] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ab7c2fe-209d-4170-a0ab-fdd914a92e2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.849251] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b2600e-751e-4499-a73f-c83a3293114d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.892817] env[68437]: WARNING nova.virt.vmwareapi.vmops [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 26985e45-21ff-40bb-ac2b-c6f3700ccc97 could not be found. [ 896.893081] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.893231] env[68437]: INFO nova.compute.manager [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 0.06 seconds to destroy the instance on the hypervisor. [ 896.894181] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.894589] env[68437]: DEBUG nova.compute.manager [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 896.894689] env[68437]: DEBUG nova.network.neutron [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 896.915234] env[68437]: DEBUG nova.network.neutron [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 896.919648] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944288, 'name': ReconfigVM_Task, 'duration_secs': 0.685164} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.920211] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Reconfigured VM instance instance-0000003a to attach disk [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/c9d26fd4-f780-4986-8a5f-dea041a70f5d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.920895] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae2dfd36-2be2-4f06-b430-8ec921bbabf3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.927842] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 896.927842] env[68437]: value = "task-2944293" [ 896.927842] env[68437]: _type = "Task" [ 896.927842] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.937962] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944293, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.978706] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944290, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.117106] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944291, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.121756] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "39c532b1-b05e-4354-ad8f-9223b06e9488" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.122106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.122359] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.122569] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.122770] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.125154] env[68437]: INFO nova.compute.manager [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Terminating instance [ 897.205211] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.205489] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.277791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-72eac214-c14b-4609-bc53-1956e67930ba tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.923s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.312862] env[68437]: DEBUG nova.compute.manager [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Received event network-changed-120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 897.312918] env[68437]: DEBUG nova.compute.manager [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Refreshing instance network info cache due to event network-changed-120267fe-8525-41c2-868c-f8e2af61464c. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 897.313223] env[68437]: DEBUG oslo_concurrency.lockutils [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] Acquiring lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.314925] env[68437]: DEBUG oslo_concurrency.lockutils [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] Acquired lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.314925] env[68437]: DEBUG nova.network.neutron [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Refreshing network info cache for port 120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 897.325028] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944292, 'name': CreateVM_Task, 'duration_secs': 0.400153} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.325933] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.326476] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.326681] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.327058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 897.327680] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95180929-e975-4a7e-866a-302ddbcdd183 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.333343] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 897.333343] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529bbf0d-1a1f-0f71-1dfc-17cdd9d4e337" [ 897.333343] env[68437]: _type = "Task" [ 897.333343] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.342663] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529bbf0d-1a1f-0f71-1dfc-17cdd9d4e337, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.371585] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "de54bc8d-2626-41fc-970a-865a842a932e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.371737] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.372158] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "de54bc8d-2626-41fc-970a-865a842a932e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.372392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.372580] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.377548] env[68437]: INFO nova.compute.manager [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Terminating instance [ 897.421463] env[68437]: DEBUG nova.network.neutron [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.438718] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944293, 'name': Rename_Task, 'duration_secs': 0.171035} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.438718] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.438718] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac88717a-d2e8-4c4b-bab0-9ff5939fcce0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.445883] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 897.445883] env[68437]: value = "task-2944294" [ 897.445883] env[68437]: _type = "Task" [ 897.445883] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.460019] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.478795] env[68437]: DEBUG oslo_vmware.api [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944290, 'name': PowerOnVM_Task, 'duration_secs': 0.55445} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.479663] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.479663] env[68437]: DEBUG nova.compute.manager [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 897.481334] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5180edb5-209d-4ec0-bf84-58908e1e6a79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.616701] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944291, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.629703] env[68437]: DEBUG nova.compute.manager [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 897.630346] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.631209] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86e1b99-0f72-4b39-a3f8-6e64a228ec4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.640910] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.641271] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b37e22a-a225-4b43-8da7-09e0cbaf5743 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.649182] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 897.649182] env[68437]: value = "task-2944295" [ 897.649182] env[68437]: _type = "Task" [ 897.649182] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.662875] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.708504] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 897.844827] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529bbf0d-1a1f-0f71-1dfc-17cdd9d4e337, 'name': SearchDatastore_Task, 'duration_secs': 0.015745} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.848301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.848301] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.848301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.848747] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.848747] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.849115] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8df31977-f490-4523-b7ce-73c7d541132a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.858883] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.860142] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.860142] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b057db2-6735-4e15-8045-1b7beaaaa31e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.867942] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 897.867942] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b7cef4-dd54-d3cb-efd6-d20fb1159b99" [ 897.867942] env[68437]: _type = "Task" [ 897.867942] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.876879] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b7cef4-dd54-d3cb-efd6-d20fb1159b99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.882594] env[68437]: DEBUG nova.compute.manager [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 897.884017] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.884017] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdad9476-1b46-49fd-8f2a-5fff3db475a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.890443] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.890961] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89f9b021-d3e8-4b94-9de1-c1364deb7413 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.904365] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 897.904365] env[68437]: value = "task-2944296" [ 897.904365] env[68437]: _type = "Task" [ 897.904365] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.913394] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944296, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.927641] env[68437]: INFO nova.compute.manager [-] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Took 1.03 seconds to deallocate network for instance. [ 897.966400] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944294, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.967481] env[68437]: DEBUG nova.network.neutron [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Port 6c053c01-e575-4bdc-93ce-3604fa26d1ee binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 897.967751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.967941] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.968172] env[68437]: DEBUG nova.network.neutron [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 898.009179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.081106] env[68437]: DEBUG nova.network.neutron [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Updated VIF entry in instance network info cache for port 120267fe-8525-41c2-868c-f8e2af61464c. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 898.081605] env[68437]: DEBUG nova.network.neutron [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Updating instance_info_cache with network_info: [{"id": "120267fe-8525-41c2-868c-f8e2af61464c", "address": "fa:16:3e:c1:0a:8c", "network": {"id": "2931ca1e-83cd-4f65-9712-488dad412e0d", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-642400798-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "63985eb5e5fb47958fd673bd0ce73f2d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0954fad3-d24d-496c-83e6-a09d3cb556fc", "external-id": "nsx-vlan-transportzone-216", "segmentation_id": 216, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap120267fe-85", "ovs_interfaceid": "120267fe-8525-41c2-868c-f8e2af61464c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.124771] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944291, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.159362] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944295, 'name': PowerOffVM_Task, 'duration_secs': 0.251391} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.162106] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.162295] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.162890] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4e61da0-3423-4b80-8a0e-6ae8a5fe1221 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.231176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.238877] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.239178] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.239388] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleting the datastore file [datastore1] 39c532b1-b05e-4354-ad8f-9223b06e9488 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.239731] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a3efed6-b97c-46d5-b8be-89688ac07e84 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.248177] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for the task: (returnval){ [ 898.248177] env[68437]: value = "task-2944298" [ 898.248177] env[68437]: _type = "Task" [ 898.248177] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.264021] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.318717] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "4f46132c-155d-4def-b017-7fd84e37eed5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.318950] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.324994] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93d0b06-e1aa-44c3-99b1-1c71bf77709c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.331968] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b6264c-bf9b-40bf-85fe-311e7af39786 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.362854] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ca594e-6442-4129-8f7e-c771094ef398 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.373435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c09aa9-26ab-4fa3-969f-26150bbca3ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.384828] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b7cef4-dd54-d3cb-efd6-d20fb1159b99, 'name': SearchDatastore_Task, 'duration_secs': 0.013035} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.394765] env[68437]: DEBUG nova.compute.provider_tree [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.396659] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cc0cee-0838-4c55-b47b-600bf1ddfdd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.402508] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 898.402508] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526c7500-ab0b-42a6-9299-63a1228850a1" [ 898.402508] env[68437]: _type = "Task" [ 898.402508] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.417419] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526c7500-ab0b-42a6-9299-63a1228850a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.421099] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944296, 'name': PowerOffVM_Task, 'duration_secs': 0.353896} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.421417] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 898.421696] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 898.421949] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83c3a5c6-3ef2-47a2-830c-0f0b37a9ddf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.434495] env[68437]: INFO nova.compute.manager [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance disappeared during terminate [ 898.434712] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a932d1ca-936e-4c19-8397-ce0a7cae8d23 tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "26985e45-21ff-40bb-ac2b-c6f3700ccc97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.247s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.458571] env[68437]: DEBUG oslo_vmware.api [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944294, 'name': PowerOnVM_Task, 'duration_secs': 0.723492} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.458840] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.459045] env[68437]: INFO nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Took 10.41 seconds to spawn the instance on the hypervisor. 
[ 898.459231] env[68437]: DEBUG nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.460087] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231588dc-fbe3-4167-8c72-28679fe76c9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.495631] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.496024] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.496114] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Deleting the datastore file [datastore2] de54bc8d-2626-41fc-970a-865a842a932e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.496350] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-712a8232-0021-4311-a003-713035f0a2f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.505182] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for the task: (returnval){ [ 898.505182] env[68437]: value = "task-2944300" [ 898.505182] env[68437]: _type = "Task" [ 898.505182] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.515417] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.584023] env[68437]: DEBUG oslo_concurrency.lockutils [req-676ecd7d-b73a-43e9-a1dd-fbe3ebc93094 req-d36e70b2-4ec8-4677-9937-2be8de601970 service nova] Releasing lock "refresh_cache-832c99fc-0f09-4ccb-96f9-894ce62eb17e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.617404] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944291, 'name': CloneVM_Task, 'duration_secs': 1.78458} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.617404] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Created linked-clone VM from snapshot [ 898.618042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef0375d-5ae9-4d3f-9a49-a0087c816aa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.626568] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Uploading image 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 898.655314] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 898.655314] env[68437]: value = "vm-591004" [ 898.655314] env[68437]: _type = "VirtualMachine" [ 898.655314] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 898.655596] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-36224843-e342-4ef7-85d2-99074fb157f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.662878] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease: (returnval){ [ 898.662878] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c56710-636a-50ed-2256-73731e92b866" [ 898.662878] env[68437]: _type = "HttpNfcLease" [ 898.662878] env[68437]: } obtained for exporting VM: (result){ [ 898.662878] env[68437]: value = "vm-591004" [ 898.662878] env[68437]: _type = "VirtualMachine" [ 898.662878] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 898.663191] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the lease: (returnval){ [ 898.663191] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c56710-636a-50ed-2256-73731e92b866" [ 898.663191] env[68437]: _type = "HttpNfcLease" [ 898.663191] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 898.670412] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 898.670412] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c56710-636a-50ed-2256-73731e92b866" [ 898.670412] env[68437]: _type = "HttpNfcLease" [ 898.670412] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 898.688345] env[68437]: DEBUG nova.network.neutron [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.764950] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.822092] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 898.901033] env[68437]: DEBUG nova.scheduler.client.report [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.913918] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526c7500-ab0b-42a6-9299-63a1228850a1, 'name': SearchDatastore_Task, 'duration_secs': 0.021906} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.914202] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.914449] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 832c99fc-0f09-4ccb-96f9-894ce62eb17e/832c99fc-0f09-4ccb-96f9-894ce62eb17e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.914695] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ece35c4-9931-4474-8d3d-fb5769c3f002 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.920598] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 898.920598] env[68437]: value = "task-2944302" [ 898.920598] env[68437]: _type = "Task" [ 898.920598] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.930202] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.979813] env[68437]: INFO nova.compute.manager [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Took 48.92 seconds to build instance. [ 899.016410] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.171113] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.171113] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c56710-636a-50ed-2256-73731e92b866" [ 899.171113] env[68437]: _type = "HttpNfcLease" [ 899.171113] env[68437]: } is ready. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 899.171544] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 899.171544] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c56710-636a-50ed-2256-73731e92b866" [ 899.171544] env[68437]: _type = "HttpNfcLease" [ 899.171544] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 899.172207] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222a7bd0-3804-41ae-a13f-f147d84ae153 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.179422] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 899.179624] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 899.239138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.262936] env[68437]: DEBUG oslo_vmware.api [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Task: {'id': task-2944298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.592395} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.263288] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.263515] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.263706] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.263901] env[68437]: INFO nova.compute.manager [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Took 1.63 seconds to destroy the instance on the hypervisor. [ 899.264170] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.264362] env[68437]: DEBUG nova.compute.manager [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.264457] env[68437]: DEBUG nova.network.neutron [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 899.312985] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.313328] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.313579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.313785] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.313995] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.316263] env[68437]: INFO nova.compute.manager [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Terminating instance [ 899.349945] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
899.366641] env[68437]: INFO nova.compute.manager [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Rescuing [ 899.366956] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.367251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.367579] env[68437]: DEBUG nova.network.neutron [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 899.402095] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-485b1fa8-a5e3-4074-b90b-4a97d32b9491 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.409178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.409570] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 899.413269] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.185s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.413269] env[68437]: DEBUG nova.objects.instance [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lazy-loading 'resources' on Instance uuid ed1a81fd-dd4b-4126-96de-3c3f67cdca31 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.438128] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944302, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.481819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b935a971-7c55-467e-9e8d-9370467f2e44 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.512s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.518544] env[68437]: DEBUG oslo_vmware.api [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Task: {'id': task-2944300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.525575} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.519336] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.519336] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.519336] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.519536] env[68437]: INFO nova.compute.manager [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Took 1.64 seconds to destroy the instance on the hypervisor. 
[ 899.519714] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.519989] env[68437]: DEBUG nova.compute.manager [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.521683] env[68437]: DEBUG nova.network.neutron [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 899.580327] env[68437]: DEBUG nova.compute.manager [req-98673126-622a-4096-9c3d-ab8f4b5bfa92 req-fb10994f-e554-4f7d-a68e-76b879d6d6b9 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Received event network-vif-deleted-c09d45df-fef7-4b7f-ac2b-cea270301ba4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 899.580979] env[68437]: INFO nova.compute.manager [req-98673126-622a-4096-9c3d-ab8f4b5bfa92 req-fb10994f-e554-4f7d-a68e-76b879d6d6b9 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Neutron deleted interface c09d45df-fef7-4b7f-ac2b-cea270301ba4; detaching it from the instance and deleting it from the info cache [ 899.581389] env[68437]: DEBUG nova.network.neutron [req-98673126-622a-4096-9c3d-ab8f4b5bfa92 req-fb10994f-e554-4f7d-a68e-76b879d6d6b9 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.744993] env[68437]: DEBUG nova.compute.manager [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68437) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 899.745274] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.820167] env[68437]: DEBUG nova.compute.manager [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 899.820442] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.821464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ca2573-f68f-4dc0-92d3-2c8c1534ae03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.833262] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.833583] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45475333-326f-495a-b18e-09541f38b3e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.841120] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 899.841120] env[68437]: value = "task-2944303" [ 899.841120] env[68437]: _type = "Task" [ 899.841120] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.848781] env[68437]: DEBUG nova.compute.manager [req-4d9cd3c7-e9d7-4b99-a074-e43e392a45c4 req-bb1e5af2-efc6-4137-9342-d8824c599371 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Received event network-vif-deleted-dff04cc6-cf18-4191-b01e-acba3b9b03b3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 899.849647] env[68437]: INFO nova.compute.manager [req-4d9cd3c7-e9d7-4b99-a074-e43e392a45c4 req-bb1e5af2-efc6-4137-9342-d8824c599371 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Neutron deleted interface dff04cc6-cf18-4191-b01e-acba3b9b03b3; detaching it from the instance and deleting it from the info cache [ 899.849960] env[68437]: DEBUG nova.network.neutron [req-4d9cd3c7-e9d7-4b99-a074-e43e392a45c4 req-bb1e5af2-efc6-4137-9342-d8824c599371 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.857428] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944303, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.918051] env[68437]: DEBUG nova.compute.utils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.925192] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.926041] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 899.953957] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68361} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.954477] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 832c99fc-0f09-4ccb-96f9-894ce62eb17e/832c99fc-0f09-4ccb-96f9-894ce62eb17e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.957972] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.957972] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0adfa3ea-51c6-4399-a94a-06a08c2e16f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.968943] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 899.968943] env[68437]: value = "task-2944304" [ 899.968943] env[68437]: _type = "Task" [ 899.968943] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.980725] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944304, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.997594] env[68437]: DEBUG nova.policy [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7605d44a5b5448a3966872b4f524d13c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40d8becefc85431b9723c72aa09d152b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 900.054468] env[68437]: DEBUG nova.network.neutron [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.083696] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b514e80-8a3d-4a4e-8950-2c744c9c8817 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.100281] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73225658-1b53-4129-b031-b05abffac11b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.151683] env[68437]: DEBUG nova.compute.manager [req-98673126-622a-4096-9c3d-ab8f4b5bfa92 req-fb10994f-e554-4f7d-a68e-76b879d6d6b9 service nova] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Detach interface failed, port_id=c09d45df-fef7-4b7f-ac2b-cea270301ba4, reason: Instance 39c532b1-b05e-4354-ad8f-9223b06e9488 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 900.290509] env[68437]: DEBUG nova.network.neutron [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.325629] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Successfully created port: 28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.330317] env[68437]: DEBUG nova.network.neutron [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.355201] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944303, 'name': PowerOffVM_Task, 'duration_secs': 0.227888} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.355201] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.355201] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.355201] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87a54ba7-1949-493f-a0e6-ed1ab9f45be3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.355714] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbb29e29-929f-4a72-9d01-a067a18ecd2b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.366670] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f36775-c7d4-4b9f-886b-25c339778933 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.406017] env[68437]: DEBUG nova.compute.manager [req-4d9cd3c7-e9d7-4b99-a074-e43e392a45c4 req-bb1e5af2-efc6-4137-9342-d8824c599371 service nova] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Detach interface failed, port_id=dff04cc6-cf18-4191-b01e-acba3b9b03b3, reason: Instance de54bc8d-2626-41fc-970a-865a842a932e could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 900.434940] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 900.484934] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.145005} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.485038] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 900.485814] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa38555-4131-4bd0-8b1f-00228875a59b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.513793] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 832c99fc-0f09-4ccb-96f9-894ce62eb17e/832c99fc-0f09-4ccb-96f9-894ce62eb17e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.514499] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ad38b1c-f512-494f-b154-6b4320ae2ba2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.539645] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 900.539645] env[68437]: value = "task-2944306" [ 900.539645] env[68437]: _type = "Task" [ 900.539645] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.547764] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944306, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.559034] env[68437]: INFO nova.compute.manager [-] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Took 1.29 seconds to deallocate network for instance. 
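The "waited 30.185s" / "held 2.666s" figures reported for the compute_resources lock in the entries above come from the lock wrapper recording how long a caller queued for the lock and how long it then held it. Below is a minimal, self-contained sketch of that accounting pattern, assuming a plain threading.Lock; the timed_lock helper and its messages are illustrative stand-ins, not oslo_concurrency.lockutils' actual code.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, caller, log=print):
        """Acquire `lock` while logging waited/held times, in the style of the
        'Acquiring lock' / 'acquired :: waited Ns' / 'released :: held Ns' lines."""
        log('Acquiring lock "%s" by "%s"' % (name, caller))
        wait_start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        log('Lock "%s" acquired by "%s" :: waited %.3fs'
            % (name, caller, acquired - wait_start))
        try:
            yield
        finally:
            lock.release()
            log('Lock "%s" "released" by "%s" :: held %.3fs'
                % (name, caller, time.monotonic() - acquired))

    # Example: serialize resource-tracker style updates on one shared lock.
    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources",
                    "ResourceTracker.update_usage"):
        time.sleep(0.01)  # stand-in for the work done while the lock is held

Because a single compute_resources lock serializes claims, usage updates, and cleanup, long "waited" values like the 30.185s above simply mean the caller queued behind earlier holders, not that anything failed.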
[ 900.626852] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1d3122f-6f0a-4229-854e-71d4fc056d1a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.635833] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f704272a-e028-4a30-94b1-5b4a6da9a127 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.671662] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaed260-ead8-4d46-8194-b5a164ff5de0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.679524] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dfe2d6-b224-4293-9701-975cfa724250 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.698591] env[68437]: DEBUG nova.compute.provider_tree [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.794455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.833607] env[68437]: INFO nova.compute.manager [-] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Took 1.31 seconds to deallocate network for instance. [ 901.051290] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944306, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.069269] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.202491] env[68437]: DEBUG nova.scheduler.client.report [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.341169] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.445578] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 901.468830] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.469166] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.469407] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.469669] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.469961] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.470203] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.470520] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.471022] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.471022] env[68437]: DEBUG 
nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.471215] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.472052] env[68437]: DEBUG nova.virt.hardware [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.472551] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1a5132-177e-41bf-8956-cd6d04881331 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.481712] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bd8f76-4ec6-4968-a21a-00835aa22b97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.549201] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944306, 'name': ReconfigVM_Task, 'duration_secs': 0.806881} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.549500] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 832c99fc-0f09-4ccb-96f9-894ce62eb17e/832c99fc-0f09-4ccb-96f9-894ce62eb17e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 901.550276] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-339ff338-1891-47d2-b029-9fee9cca30e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.556708] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 901.556708] env[68437]: value = "task-2944307" [ 901.556708] env[68437]: _type = "Task" [ 901.556708] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.564603] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944307, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.708077] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.295s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.710576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.347s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.710852] env[68437]: DEBUG nova.objects.instance [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lazy-loading 'resources' on Instance uuid fc62ff9d-1bd8-4b32-9e71-41410276802d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.742877] env[68437]: INFO nova.scheduler.client.report [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted allocations for instance ed1a81fd-dd4b-4126-96de-3c3f67cdca31 [ 901.834981] env[68437]: DEBUG nova.compute.manager [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received event network-vif-plugged-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 901.835320] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] Acquiring lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.835525] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.835752] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.835926] env[68437]: DEBUG nova.compute.manager [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] No waiting events found dispatching network-vif-plugged-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 901.836743] 
env[68437]: WARNING nova.compute.manager [req-d2cfe39a-5c09-4362-b668-19c95c95efd5 req-3562f65c-c298-4ccd-b2e7-9de0ca1d84d6 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received unexpected event network-vif-plugged-28d9f2cf-baaf-4817-acdb-525b41381e45 for instance with vm_state building and task_state spawning. [ 901.895537] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Successfully updated port: 28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 902.066652] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944307, 'name': Rename_Task, 'duration_secs': 0.184869} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.066949] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.067217] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b025d7df-40d8-44ff-aea7-f33e707346ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.074379] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 902.074379] env[68437]: value = "task-2944308" [ 902.074379] env[68437]: _type = "Task" [ 902.074379] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.081937] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944308, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.251954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d99e350a-3485-48e5-adaa-cb70e4ef46d5 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "ed1a81fd-dd4b-4126-96de-3c3f67cdca31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.634s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.327957] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.328292] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-704675f1-58a2-4830-854f-e105fa6e1fbc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.339030] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 902.339030] env[68437]: value = "task-2944309" [ 902.339030] env[68437]: _type = "Task" [ 902.339030] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.349657] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944309, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.399364] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.399425] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.399590] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 902.589027] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944308, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.682152] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311f13b0-af27-4ba2-9639-8d5455c694fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.691239] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a214d-1c0e-4517-8238-3a0744d1adeb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.725735] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7afd100-a86b-4239-99b2-7e26653b3e47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.733111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df307761-249b-427f-98d9-026523be4092 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.746657] env[68437]: DEBUG nova.compute.provider_tree [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.849470] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944309, 'name': PowerOffVM_Task, 'duration_secs': 0.255325} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.849758] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.850620] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03df25e0-da24-4956-a1dd-cb749a020218 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.869913] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b251bd1f-5cbf-4c84-b5a2-bc373e4688de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.911986] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.912221] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef5494e0-7445-43c6-ba8d-bd8155c781b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.919482] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 902.919482] env[68437]: value = "task-2944310" [ 902.919482] env[68437]: _type = "Task" [ 902.919482] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.928238] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944310, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.936157] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 903.078296] env[68437]: DEBUG nova.network.neutron [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [{"id": "28d9f2cf-baaf-4817-acdb-525b41381e45", "address": "fa:16:3e:78:2e:6c", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28d9f2cf-ba", "ovs_interfaceid": "28d9f2cf-baaf-4817-acdb-525b41381e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.089968] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944308, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.250577] env[68437]: DEBUG nova.scheduler.client.report [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.430083] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 903.430369] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.430662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.430816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.431027] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.431303] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bc268a8-81e5-4ee0-84d8-bf672e9574f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.441070] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
903.441324] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.442100] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0771e20-d021-402f-9fb2-07c287a64491 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.448420] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 903.448420] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fe0dca-c076-bb9e-55fb-68d1e7f1e58d" [ 903.448420] env[68437]: _type = "Task" [ 903.448420] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.457093] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fe0dca-c076-bb9e-55fb-68d1e7f1e58d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.585076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.585410] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Instance network_info: |[{"id": "28d9f2cf-baaf-4817-acdb-525b41381e45", "address": "fa:16:3e:78:2e:6c", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28d9f2cf-ba", "ovs_interfaceid": "28d9f2cf-baaf-4817-acdb-525b41381e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 903.585827] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:2e:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28d9f2cf-baaf-4817-acdb-525b41381e45', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.593310] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating folder: Project (40d8becefc85431b9723c72aa09d152b). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.596837] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a179be39-923a-4aad-886d-105124ecd35c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.598587] env[68437]: DEBUG oslo_vmware.api [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944308, 'name': PowerOnVM_Task, 'duration_secs': 1.120261} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.598834] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 903.599047] env[68437]: INFO nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Took 9.43 seconds to spawn the instance on the hypervisor. [ 903.599231] env[68437]: DEBUG nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 903.600488] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1395281-9e00-420c-a775-4cbdddc49e45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.610809] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created folder: Project (40d8becefc85431b9723c72aa09d152b) in parent group-v590848. [ 903.611149] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating folder: Instances. 
Parent ref: group-v591006. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.611849] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36e4caf5-5ce1-48bb-b95f-6d129ca02eed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.621751] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created folder: Instances in parent group-v591006. [ 903.621997] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.622210] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 903.623125] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4037cf9f-4ff2-41a0-aeb3-abaf0d117361 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.640994] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.640994] env[68437]: value = "task-2944313" [ 903.640994] env[68437]: _type = "Task" [ 903.640994] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.649490] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944313, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.759749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.763127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 33.843s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.763127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.763127] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 903.763127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.320s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.763564] env[68437]: DEBUG nova.objects.instance [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lazy-loading 'resources' on Instance uuid 9a7c248f-5262-4f03-aace-f22c4976bb0f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.765134] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218b1155-ad6d-4983-a51f-5c6069d9da10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.773598] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0491f8-ea4f-4f98-ac5d-74b2af80679f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.788785] env[68437]: INFO nova.scheduler.client.report [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Deleted allocations for instance fc62ff9d-1bd8-4b32-9e71-41410276802d [ 903.790375] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9653c047-2a4d-4e3f-9f47-99a96b12b89a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.800027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6beee172-be24-4bcd-b623-20fa5da74b0e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.832441] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178168MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 903.833372] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.884684] env[68437]: DEBUG nova.compute.manager [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 903.884958] env[68437]: DEBUG nova.compute.manager [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing instance network info cache due to event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 903.885284] env[68437]: DEBUG oslo_concurrency.lockutils [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] Acquiring lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.885501] env[68437]: DEBUG oslo_concurrency.lockutils [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] Acquired lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.885743] env[68437]: DEBUG nova.network.neutron [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 903.959661] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fe0dca-c076-bb9e-55fb-68d1e7f1e58d, 'name': SearchDatastore_Task, 'duration_secs': 0.033171} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.961057] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da32df9e-66c7-4c13-974f-c751e47ea604 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.967548] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 903.967548] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4262-674f-cc27-0eb6-53bdfc188b14" [ 903.967548] env[68437]: _type = "Task" [ 903.967548] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.975676] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4262-674f-cc27-0eb6-53bdfc188b14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.119054] env[68437]: INFO nova.compute.manager [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Took 46.06 seconds to build instance. [ 904.151302] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944313, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.298314] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ed6154d-9f74-473e-8d44-43411aa60996 tempest-MultipleCreateTestJSON-1530070832 tempest-MultipleCreateTestJSON-1530070832-project-member] Lock "fc62ff9d-1bd8-4b32-9e71-41410276802d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.474s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.310881] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 904.311234] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 904.311471] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore1] ba0d8067-a617-4910-b2f6-33a7be461f8e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 904.312337] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23c37042-f699-413c-8733-14528f7aa57c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.321144] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 904.321144] env[68437]: value = "task-2944314" [ 904.321144] env[68437]: _type = "Task" [ 904.321144] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.332173] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944314, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.480990] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4262-674f-cc27-0eb6-53bdfc188b14, 'name': SearchDatastore_Task, 'duration_secs': 0.018637} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.483864] env[68437]: DEBUG oslo_concurrency.lockutils [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.484335] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. {{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 904.484802] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddc19007-bc18-44a6-b151-01fc8a0f9ae0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.494691] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 904.494691] env[68437]: value = "task-2944315" [ 904.494691] env[68437]: _type = "Task" [ 904.494691] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.501956] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944315, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.621321] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4c63961d-20c6-4775-92f9-31f25cbcb757 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.702s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.629903] env[68437]: DEBUG nova.network.neutron [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updated VIF entry in instance network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 904.631267] env[68437]: DEBUG nova.network.neutron [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [{"id": "28d9f2cf-baaf-4817-acdb-525b41381e45", "address": "fa:16:3e:78:2e:6c", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28d9f2cf-ba", "ovs_interfaceid": "28d9f2cf-baaf-4817-acdb-525b41381e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.656611] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944313, 'name': CreateVM_Task, 'duration_secs': 0.603793} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.656791] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 904.657493] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.658890] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.658890] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 904.658890] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1ffd111-4f58-4d94-9761-378ffebbd023 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.663235] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 904.663235] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b00810-3e28-be55-4682-9401b0ef9310" [ 904.663235] env[68437]: _type = "Task" [ 904.663235] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.676880] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b00810-3e28-be55-4682-9401b0ef9310, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.750790] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79844eea-e210-45cf-8fea-d2dbcf955037 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.758741] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a6b913-7f55-4d4b-87b4-bcf9a2c09ece {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.792724] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfd0d90-f7de-42d1-9e77-5570ae5d19c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.801120] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62d0d9b-9e8e-4ab5-ac9e-828dc86c04e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.817709] env[68437]: DEBUG nova.compute.provider_tree [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.832983] env[68437]: DEBUG oslo_vmware.api [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268971} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.833425] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 904.833425] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 904.833617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 904.833771] env[68437]: INFO nova.compute.manager [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Took 5.01 seconds to destroy the instance on the hypervisor. [ 904.834041] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 904.834246] env[68437]: DEBUG nova.compute.manager [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 904.834346] env[68437]: DEBUG nova.network.neutron [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 905.005457] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944315, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510722} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.005849] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. 
[ 905.006770] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dbe369-2d4d-411b-88cd-3c2554cd9d49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.031940] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.032267] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c49dc814-4e97-4df2-960d-a8e732565580 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.050165] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 905.050165] env[68437]: value = "task-2944316" [ 905.050165] env[68437]: _type = "Task" [ 905.050165] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.059792] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944316, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.135174] env[68437]: DEBUG oslo_concurrency.lockutils [req-d26cb900-182e-4f04-bc7a-03e23d08332d req-3c5eed95-e1ad-44a5-a585-900f944c74e3 service nova] Releasing lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.173607] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b00810-3e28-be55-4682-9401b0ef9310, 'name': SearchDatastore_Task, 'duration_secs': 0.013381} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.173909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.174163] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.174401] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.174547] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.174719] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.174986] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1772b9a3-e8fe-4efb-af3b-1415824f8050 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.183562] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.183763] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.184525] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68e4c8b1-4187-4b81-835a-f10b59cc53de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.189960] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 905.189960] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52324192-ae42-26a7-33e8-27b02357f852" [ 905.189960] env[68437]: _type = "Task" [ 905.189960] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.198066] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52324192-ae42-26a7-33e8-27b02357f852, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.324967] env[68437]: DEBUG nova.scheduler.client.report [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.561813] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944316, 'name': ReconfigVM_Task, 'duration_secs': 0.337593} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.562227] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Reconfigured VM instance instance-0000003a to attach disk [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.563194] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a430ed22-3a5a-4e44-83a5-caf40eb3d998 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.588948] env[68437]: DEBUG nova.network.neutron [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.590246] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-411d3986-766b-4d75-b732-55123d9359b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.608224] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 905.608224] env[68437]: value = "task-2944317" [ 905.608224] env[68437]: _type = "Task" [ 905.608224] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.618835] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944317, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.701102] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52324192-ae42-26a7-33e8-27b02357f852, 'name': SearchDatastore_Task, 'duration_secs': 0.013264} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.701987] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-193fa66c-9386-4e3c-bf0e-454d9ca6fcb0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.708690] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 905.708690] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529b1234-73ed-8682-8004-331f90a16613" [ 905.708690] env[68437]: _type = "Task" [ 905.708690] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.716167] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b1234-73ed-8682-8004-331f90a16613, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.834027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.067s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.834027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.688s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.836172] env[68437]: INFO nova.compute.claims [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.872588] env[68437]: INFO nova.scheduler.client.report [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Deleted allocations for instance 9a7c248f-5262-4f03-aace-f22c4976bb0f [ 905.921151] env[68437]: DEBUG nova.compute.manager [req-deba0efe-0f93-45a1-af06-fea48318bebd req-193bbe4b-db62-4a38-bb69-b49118e1826d service nova] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Received event network-vif-deleted-7a0fc670-7d4b-404c-bd1b-73f31da1c3d4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 906.103782] env[68437]: INFO nova.compute.manager [-] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Took 1.27 seconds to deallocate network for instance. [ 906.118759] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944317, 'name': ReconfigVM_Task, 'duration_secs': 0.17536} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.119080] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.119361] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edef709b-6e83-4932-9ad2-4dc5dad20931 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.127096] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 906.127096] env[68437]: value = "task-2944318" [ 906.127096] env[68437]: _type = "Task" [ 906.127096] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.137743] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.218743] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b1234-73ed-8682-8004-331f90a16613, 'name': SearchDatastore_Task, 'duration_secs': 0.014626} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.219197] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.219990] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 353ebb37-7e69-49d4-873e-2272cbfff6e8/353ebb37-7e69-49d4-873e-2272cbfff6e8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 906.220436] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6de63ead-90d7-45d7-95a4-952ba40862ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.227052] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 906.227052] env[68437]: value = "task-2944319" [ 906.227052] env[68437]: _type = "Task" [ 906.227052] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.237551] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.385718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dee8899-8e12-43e2-834e-c08cd7010b4a tempest-ListServerFiltersTestJSON-599693446 tempest-ListServerFiltersTestJSON-599693446-project-member] Lock "9a7c248f-5262-4f03-aace-f22c4976bb0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.480s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.612235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.640979] env[68437]: DEBUG oslo_vmware.api [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944318, 'name': PowerOnVM_Task, 'duration_secs': 0.401022} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.640979] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.643707] env[68437]: DEBUG nova.compute.manager [None req-beda14e9-5357-45db-97b3-88e6c67f7613 tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.644557] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5592d99-df47-40fe-ae70-6f3562a4bfde {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.745908] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944319, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.242960] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765887} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.242960] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 353ebb37-7e69-49d4-873e-2272cbfff6e8/353ebb37-7e69-49d4-873e-2272cbfff6e8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 907.243126] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 907.243356] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cff32c7e-7227-4d71-8c63-d7b185a8a498 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.250105] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 907.250105] env[68437]: value = "task-2944320" [ 907.250105] env[68437]: _type = "Task" [ 907.250105] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.258867] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.359229] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57f46f0-4de5-42a5-a8e3-9d65345d5d60 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.367292] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e415b06-a5ed-4d5f-a4d3-a361249392f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.401266] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff43107c-049c-4daf-b4df-f3b7e5c80fbe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.407707] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8254e3a8-ef86-4280-b661-5643d24e498f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.421805] env[68437]: DEBUG nova.compute.provider_tree [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.589851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.590192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.590417] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.590603] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock 
"832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.590772] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.593878] env[68437]: INFO nova.compute.manager [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Terminating instance [ 907.760815] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082393} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.765152] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.765152] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11475423-a905-4f1a-b412-d9da313bee3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.786138] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 353ebb37-7e69-49d4-873e-2272cbfff6e8/353ebb37-7e69-49d4-873e-2272cbfff6e8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.787756] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61306e4a-3d43-47fd-8953-368f5a1a2e88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.806810] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 907.806810] env[68437]: value = "task-2944321" [ 907.806810] env[68437]: _type = "Task" [ 907.806810] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.814955] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944321, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.925626] env[68437]: DEBUG nova.scheduler.client.report [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.101023] env[68437]: DEBUG nova.compute.manager [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 908.101023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.101023] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b685e34-8b31-407d-82ef-bf01a0e5643e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.107484] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.107942] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1b347f0-7eb9-4f4d-8ae8-ce76261723e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.116307] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 908.116307] env[68437]: value = "task-2944322" [ 908.116307] env[68437]: _type = "Task" [ 908.116307] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.130284] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.317168] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944321, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.425170] env[68437]: DEBUG nova.compute.manager [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 908.425170] env[68437]: DEBUG nova.compute.manager [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing instance network info cache due to event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 908.425170] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.425705] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.426065] env[68437]: DEBUG nova.network.neutron [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 908.437018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.437018] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 908.438501] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.026s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.441503] env[68437]: INFO nova.compute.claims [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.628484] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944322, 'name': PowerOffVM_Task, 'duration_secs': 0.253624} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.628890] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.629125] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.629805] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e924ee00-49d2-4240-a5cd-5847d9322bba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.695246] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.695592] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.695790] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleting the datastore file [datastore1] 832c99fc-0f09-4ccb-96f9-894ce62eb17e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.696087] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-942331bf-a57f-4867-96d4-f8ff06006468 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.703950] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 908.703950] env[68437]: value = "task-2944324" [ 908.703950] env[68437]: _type = "Task" [ 908.703950] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.714218] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944324, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.771816] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 908.772757] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e913ee-445c-47a2-9a44-ac5d1b2fd1a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.779648] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 908.779851] env[68437]: ERROR oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk due to incomplete transfer. [ 908.780124] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e858cd90-f87d-4b9a-85e5-5a99790f7731 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.788487] env[68437]: DEBUG oslo_vmware.rw_handles [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52bb6ec5-3bf6-8dfb-5a03-9252970a2e9b/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 908.788739] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Uploaded image 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 908.792795] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 908.793086] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5c566de0-9f8d-4fa9-9cfd-145314180a81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.800505] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 908.800505] env[68437]: value = "task-2944325" [ 908.800505] env[68437]: _type = "Task" [ 908.800505] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.814867] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944325, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.820840] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944321, 'name': ReconfigVM_Task, 'duration_secs': 0.73595} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.821161] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 353ebb37-7e69-49d4-873e-2272cbfff6e8/353ebb37-7e69-49d4-873e-2272cbfff6e8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.821804] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc5cbdab-e4ed-4b90-aea7-74a94df77ba4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.829042] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 908.829042] env[68437]: value = "task-2944326" [ 908.829042] env[68437]: _type = "Task" [ 908.829042] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.839694] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944326, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.950285] env[68437]: DEBUG nova.compute.utils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 908.954962] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 908.958666] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 909.115436] env[68437]: DEBUG nova.policy [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50665629ca424852a9ae988b5b76d76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd590545b752249c184b49dc720b78944', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.212583] env[68437]: DEBUG nova.network.neutron [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updated VIF entry in instance network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 909.213434] env[68437]: DEBUG nova.network.neutron [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.221156] env[68437]: DEBUG oslo_vmware.api [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944324, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.221395] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.221616] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.221963] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.222109] env[68437]: INFO nova.compute.manager [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 909.222370] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 909.222567] env[68437]: DEBUG nova.compute.manager [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 909.222744] env[68437]: DEBUG nova.network.neutron [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 909.311576] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944325, 'name': Destroy_Task, 'duration_secs': 0.45056} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.311843] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Destroyed the VM [ 909.316543] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 909.316543] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-60c81df8-8a49-4162-8789-3c74cb414d35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.323951] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 909.323951] env[68437]: value = "task-2944327" [ 909.323951] env[68437]: _type = "Task" [ 909.323951] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.339503] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944327, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.342720] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944326, 'name': Rename_Task, 'duration_secs': 0.188075} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.343021] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.343378] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17a40b0c-3486-4b08-81d4-408cb112c2b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.350601] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 909.350601] env[68437]: value = "task-2944328" [ 909.350601] env[68437]: _type = "Task" [ 909.350601] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.365791] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.454184] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 909.718553] env[68437]: DEBUG oslo_concurrency.lockutils [req-bbdbd3ef-4d39-4c9c-8c77-53971fa13606 req-ef8d987d-da3e-4d76-86ef-a5998024e5fa service nova] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.833183] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944327, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.859208] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944328, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.071110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92426ab0-b4d0-449c-bec8-6b3d0059a39c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.079180] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e05006-c784-4408-bd54-13653a726f29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.110278] env[68437]: DEBUG nova.network.neutron [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.112354] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea81566-9edf-4ef2-95c3-caeea12afeff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.120587] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846f09fe-a0d1-4bf6-961d-95264add26f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.136049] env[68437]: DEBUG nova.compute.provider_tree [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.320738] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Successfully created port: 77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 910.339236] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944327, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.362658] env[68437]: DEBUG oslo_vmware.api [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944328, 'name': PowerOnVM_Task, 'duration_secs': 0.610964} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.362951] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.363168] env[68437]: INFO nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Took 8.92 seconds to spawn the instance on the hypervisor. [ 910.363352] env[68437]: DEBUG nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.364149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfded5ba-e7b2-4375-8c44-3fcb0abf2b67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.464768] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None 
req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.505021] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.505576] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.505877] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.506871] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.507199] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.508118] env[68437]: DEBUG nova.virt.hardware [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.509098] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed1526f-010c-4f7f-8a5d-3550206a2f32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.518716] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f7e20b-ae2d-48b5-bad5-84847d127b2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.615897] env[68437]: INFO nova.compute.manager [-] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Took 1.39 seconds to deallocate network for instance. 
[ 910.640068] env[68437]: DEBUG nova.scheduler.client.report [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.645385] env[68437]: DEBUG nova.compute.manager [req-69518ece-63aa-4053-a9d4-7bb694ac9695 req-da137d21-b828-4141-aa36-6499ab3b6013 service nova] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Received event network-vif-deleted-120267fe-8525-41c2-868c-f8e2af61464c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 910.837647] env[68437]: DEBUG oslo_vmware.api [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944327, 'name': RemoveSnapshot_Task, 'duration_secs': 1.397518} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.837888] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 910.838050] env[68437]: INFO nova.compute.manager [None req-498a8747-669f-4e96-b391-5c18ab2f44c9 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Took 17.34 seconds to snapshot the instance on the hypervisor. [ 910.884108] env[68437]: INFO nova.compute.manager [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Took 44.48 seconds to build instance. 
[ 911.120016] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.147922] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.709s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.148772] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.151900] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.846s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.152213] env[68437]: DEBUG nova.objects.instance [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'resources' on Instance uuid 27c18765-38cf-41d6-9139-9acffa94fbe6 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.387550] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0497d159-aacf-467f-b378-0e864f4ae3c3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.651s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.464469] env[68437]: DEBUG nova.compute.manager [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 911.464694] env[68437]: DEBUG nova.compute.manager [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing instance network info cache due to event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 911.464911] env[68437]: DEBUG oslo_concurrency.lockutils [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.466924] env[68437]: DEBUG oslo_concurrency.lockutils [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.467176] env[68437]: DEBUG nova.network.neutron [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 911.655813] env[68437]: DEBUG nova.compute.utils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.663632] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.663632] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 911.727111] env[68437]: DEBUG nova.policy [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50665629ca424852a9ae988b5b76d76a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd590545b752249c184b49dc720b78944', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.878234] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.878507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock 
"c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.910054] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.910337] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.124308] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Successfully created port: 5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.164617] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.278555] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed50f71-df72-4665-a454-9e6732aafe5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.291345] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0196337-6c7f-4c2c-a9e0-a24725624280 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.329525] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c820f142-76db-4c4b-8522-a455bba26367 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.337220] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffffaff9-97b1-4110-a46c-830993318470 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.352856] env[68437]: DEBUG nova.compute.provider_tree [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.359819] env[68437]: DEBUG nova.network.neutron [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updated VIF entry in instance network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 912.360318] env[68437]: DEBUG nova.network.neutron [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.381427] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 912.414047] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 912.532264] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Successfully updated port: 77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 912.854512] env[68437]: DEBUG nova.scheduler.client.report [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.866672] env[68437]: DEBUG oslo_concurrency.lockutils [req-822026d4-5c8d-4bb5-92bd-1c958fd3052d req-4902eb62-b853-4389-abad-8d79d3e259a6 service nova] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.001588] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.012182] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.039479] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.039663] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.039840] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 913.176822] env[68437]: DEBUG 
nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.202636] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.202911] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.203084] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.203695] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.203888] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.204141] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.204454] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.204621] env[68437]: DEBUG nova.virt.hardware [None 
req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.205018] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.205216] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.205407] env[68437]: DEBUG nova.virt.hardware [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.206296] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6596c94-8380-4e86-a393-e8b65418e3b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.216181] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c005215d-9fcc-40e1-aa28-b2951479149d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.361044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.209s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.363904] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.584s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.363904] env[68437]: DEBUG nova.objects.instance [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lazy-loading 'resources' on Instance uuid 2a0772bf-ce23-4579-9bea-7e706a80cd4d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.390763] env[68437]: INFO nova.scheduler.client.report [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted allocations for instance 27c18765-38cf-41d6-9139-9acffa94fbe6 [ 913.748218] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 
tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 913.770548] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Successfully updated port: 5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.904624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4f6050fb-1e8e-4857-a632-b26b7cb1d859 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "27c18765-38cf-41d6-9139-9acffa94fbe6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.297s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.097755] env[68437]: DEBUG nova.network.neutron [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Updating instance_info_cache with network_info: [{"id": "77a40424-4554-49db-9885-dbac2faad783", "address": "fa:16:3e:97:15:b1", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a40424-45", "ovs_interfaceid": "77a40424-4554-49db-9885-dbac2faad783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.234060] env[68437]: DEBUG nova.compute.manager [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Received event network-vif-plugged-5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 914.234994] env[68437]: DEBUG oslo_concurrency.lockutils [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] Acquiring lock "4254002c-d292-4f10-a3d0-387853dbbcb3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.234994] env[68437]: DEBUG oslo_concurrency.lockutils [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] Lock 
"4254002c-d292-4f10-a3d0-387853dbbcb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.234994] env[68437]: DEBUG oslo_concurrency.lockutils [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.234994] env[68437]: DEBUG nova.compute.manager [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] No waiting events found dispatching network-vif-plugged-5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.235700] env[68437]: WARNING nova.compute.manager [req-a6caa9f6-cfa1-4174-b2ce-6ce6cd1ccc41 req-ac5e0833-bdc6-46d7-bb45-8e7a171cc4e8 service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Received unexpected event network-vif-plugged-5afaab2f-9ec7-4b17-963e-30e20520a140 for instance with vm_state building and task_state spawning. [ 914.258563] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Received event network-vif-plugged-77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 914.258776] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Acquiring lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.258975] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.259508] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.259642] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] No waiting events found dispatching network-vif-plugged-77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.259763] env[68437]: WARNING nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Received unexpected event 
network-vif-plugged-77a40424-4554-49db-9885-dbac2faad783 for instance with vm_state building and task_state spawning. [ 914.259927] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 914.260093] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing instance network info cache due to event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 914.260299] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.261620] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.261620] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 914.275160] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.275160] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.275160] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 914.426387] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa8c7c8-a508-4144-88b9-d00e4f236240 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.434909] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb73bf28-b213-46cc-b732-8c504fa7a4cf {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.467513] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a748289-1711-40f2-9a52-826734d99b1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.475376] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5f4886-8fda-4163-a075-1e7fbc1a093f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.493245] env[68437]: DEBUG nova.compute.provider_tree [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.603365] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.603704] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Instance network_info: |[{"id": "77a40424-4554-49db-9885-dbac2faad783", "address": "fa:16:3e:97:15:b1", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a40424-45", "ovs_interfaceid": "77a40424-4554-49db-9885-dbac2faad783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 914.604298] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:15:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77a40424-4554-49db-9885-dbac2faad783', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.611761] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating folder: Project (d590545b752249c184b49dc720b78944). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 914.611939] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ffc561d-0337-4d12-b3a4-7fc1eedd044f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.623140] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Created folder: Project (d590545b752249c184b49dc720b78944) in parent group-v590848. [ 914.623281] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating folder: Instances. Parent ref: group-v591009. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 914.623497] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a4dcd3d-1e28-4e04-b650-83547fd99520 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.632466] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Created folder: Instances in parent group-v591009. [ 914.632694] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 914.632874] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.633075] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd0c20b3-0985-4f0c-8fe8-4ac6958c27eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.651020] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.651020] env[68437]: value = "task-2944331" [ 914.651020] env[68437]: _type = "Task" [ 914.651020] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.662262] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944331, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.817492] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 914.995919] env[68437]: DEBUG nova.scheduler.client.report [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.061103] env[68437]: DEBUG nova.network.neutron [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Updating instance_info_cache with network_info: [{"id": "5afaab2f-9ec7-4b17-963e-30e20520a140", "address": "fa:16:3e:c0:59:fe", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5afaab2f-9e", "ovs_interfaceid": "5afaab2f-9ec7-4b17-963e-30e20520a140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.163732] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944331, 'name': CreateVM_Task, 'duration_secs': 0.311284} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.163732] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.163732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.163732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.163732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 915.163732] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-542e0553-db15-45bf-b07b-4c65b9adb517 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.167383] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 915.167383] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a26d75-5587-f48e-8d51-d6229640fffd" [ 915.167383] env[68437]: _type = "Task" [ 915.167383] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.174420] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a26d75-5587-f48e-8d51-d6229640fffd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.296087] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updated VIF entry in instance network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 915.296460] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [{"id": "9b76d917-ecbb-45a2-8959-400914c3c584", "address": "fa:16:3e:26:14:b0", "network": {"id": "d686d794-6af9-44b6-b23a-faf14c4ed0b9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1123360152-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "e774b300870a495ca1cc652d8920d32a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4020f51-6e46-4b73-a79e-9fe3fd51b917", "external-id": "nsx-vlan-transportzone-16", "segmentation_id": 16, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b76d917-ec", "ovs_interfaceid": "9b76d917-ecbb-45a2-8959-400914c3c584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.503110] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.504010] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.838s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.511577] env[68437]: INFO nova.compute.claims [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.533451] env[68437]: INFO nova.scheduler.client.report [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Deleted allocations for instance 2a0772bf-ce23-4579-9bea-7e706a80cd4d [ 915.563450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.564149] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 
tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Instance network_info: |[{"id": "5afaab2f-9ec7-4b17-963e-30e20520a140", "address": "fa:16:3e:c0:59:fe", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5afaab2f-9e", "ovs_interfaceid": "5afaab2f-9ec7-4b17-963e-30e20520a140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 915.567180] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:59:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dc7aa55d-223a-4157-9137-88dc492f2db2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5afaab2f-9ec7-4b17-963e-30e20520a140', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.573635] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.573976] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.574307] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe2eb018-3083-4858-b650-18dbf14b6072 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.602026] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.602026] env[68437]: value = "task-2944332" [ 915.602026] env[68437]: _type = "Task" [ 915.602026] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.612234] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944332, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.677728] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a26d75-5587-f48e-8d51-d6229640fffd, 'name': SearchDatastore_Task, 'duration_secs': 0.044379} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.677728] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.678343] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.678343] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.678343] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.678479] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.678665] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1147c25-d5dc-424c-8632-674f5c16c92c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.692595] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.692595] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.693224] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60acdca9-aa80-4fd2-93dc-1c7be7a15902 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.698928] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 915.698928] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52022506-2fc5-df48-b631-c58caa91642c" [ 915.698928] env[68437]: _type = "Task" [ 915.698928] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.709042] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52022506-2fc5-df48-b631-c58caa91642c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.799008] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.799597] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Received event network-changed-77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.799597] env[68437]: DEBUG nova.compute.manager [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Refreshing instance network info cache due to event network-changed-77a40424-4554-49db-9885-dbac2faad783. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 915.799740] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Acquiring lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.800022] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Acquired lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.800022] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Refreshing network info cache for port 77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 916.042150] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5717a795-c807-4e6b-a56b-27decfbe41ff tempest-ServerAddressesNegativeTestJSON-246350708 tempest-ServerAddressesNegativeTestJSON-246350708-project-member] Lock "2a0772bf-ce23-4579-9bea-7e706a80cd4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.797s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.112861] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944332, 'name': CreateVM_Task, 'duration_secs': 0.316557} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.113145] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.113852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.114235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.114843] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 916.115234] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0fa1b76-21b9-4ced-a52a-4115b305486c {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.120825] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.120825] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520a2296-e642-bc4e-90d4-19f9ea568511" [ 916.120825] env[68437]: _type = "Task" [ 916.120825] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.128640] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520a2296-e642-bc4e-90d4-19f9ea568511, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.210238] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52022506-2fc5-df48-b631-c58caa91642c, 'name': SearchDatastore_Task, 'duration_secs': 0.014917} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.211086] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e33ee88-b6b5-47b0-819d-603404bf04f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.222865] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.222865] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526f6e3b-52a2-1361-269c-f4bb9dd39791" [ 916.222865] env[68437]: _type = "Task" [ 916.222865] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.233508] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526f6e3b-52a2-1361-269c-f4bb9dd39791, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.288588] env[68437]: DEBUG nova.compute.manager [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Received event network-changed-5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 916.288798] env[68437]: DEBUG nova.compute.manager [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Refreshing instance network info cache due to event network-changed-5afaab2f-9ec7-4b17-963e-30e20520a140. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 916.289096] env[68437]: DEBUG oslo_concurrency.lockutils [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] Acquiring lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.289182] env[68437]: DEBUG oslo_concurrency.lockutils [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] Acquired lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.289444] env[68437]: DEBUG nova.network.neutron [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Refreshing network info cache for port 5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 916.317073] env[68437]: DEBUG nova.compute.manager [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 916.317249] env[68437]: DEBUG nova.compute.manager [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing instance network info cache due to event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 916.317457] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Acquiring lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.317599] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Acquired lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.317755] env[68437]: DEBUG nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 916.428204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.428750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock 
"c9d26fd4-f780-4986-8a5f-dea041a70f5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.429031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.429235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.429439] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.436052] env[68437]: INFO nova.compute.manager [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Terminating instance [ 916.637364] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520a2296-e642-bc4e-90d4-19f9ea568511, 'name': SearchDatastore_Task, 'duration_secs': 0.014681} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.637690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.639049] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.639049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.666082] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Updated VIF entry in instance network info cache for port 77a40424-4554-49db-9885-dbac2faad783. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 916.666441] env[68437]: DEBUG nova.network.neutron [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Updating instance_info_cache with network_info: [{"id": "77a40424-4554-49db-9885-dbac2faad783", "address": "fa:16:3e:97:15:b1", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a40424-45", "ovs_interfaceid": "77a40424-4554-49db-9885-dbac2faad783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.735912] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526f6e3b-52a2-1361-269c-f4bb9dd39791, 'name': SearchDatastore_Task, 
'duration_secs': 0.021894} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.736199] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.736458] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e/098010b8-b7f7-4bd1-a42c-7fc4dcaa666e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.736739] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.737219] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.737219] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17b8c3f8-79c1-4cee-9f1e-e0503eca961b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.739655] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c7648ba-e90a-4baa-81bd-849282c61574 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.750302] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.750302] env[68437]: value = "task-2944333" [ 916.750302] env[68437]: _type = "Task" [ 916.750302] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.751485] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.752499] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.755110] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbb206a9-138b-4564-9140-874e0dc23a8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.765812] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944333, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.767545] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.767545] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ae098c-b34a-5c9b-9cda-2d93ff3e647e" [ 916.767545] env[68437]: _type = "Task" [ 916.767545] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.776708] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ae098c-b34a-5c9b-9cda-2d93ff3e647e, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.778023] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c9e3ea5-c31d-4362-8832-62088064b68a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.786278] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.786278] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f4f403-c263-20b3-cc63-e167177655e0" [ 916.786278] env[68437]: _type = "Task" [ 916.786278] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.796591] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f4f403-c263-20b3-cc63-e167177655e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009462} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.796852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.797124] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4254002c-d292-4f10-a3d0-387853dbbcb3/4254002c-d292-4f10-a3d0-387853dbbcb3.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.797677] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e33b715d-41a3-4713-9752-c0d8613e8573 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.804606] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 916.804606] env[68437]: value = "task-2944334" [ 916.804606] env[68437]: _type = "Task" [ 916.804606] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.812679] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.945319] env[68437]: DEBUG nova.compute.manager [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 916.945547] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.946465] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b134d2-d2c8-4b83-952c-c9e690f71169 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.958907] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 916.959220] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83e96890-8ef5-48f0-830a-1c51d4011812 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.965678] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 916.965678] env[68437]: value = "task-2944335" [ 916.965678] env[68437]: _type = "Task" [ 916.965678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.980333] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944335, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.083210] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67982f26-c601-4994-b61d-9fc5aa83a937 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.093324] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf61703-4fe1-47b1-9eda-8cd90d986ebf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.132616] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af47ed06-0e85-4f28-96a5-974077e1a56f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.143641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a33f7a-a25a-4efd-9a4b-f8613d0bbbca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.161940] env[68437]: DEBUG nova.compute.provider_tree [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.168889] env[68437]: DEBUG oslo_concurrency.lockutils [req-5dac1246-2bb2-429d-bc76-c501ef45a9a7 req-84e8093d-f690-406a-b5d0-e4097466791d service nova] Releasing lock "refresh_cache-098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.262477] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944333, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.317611] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944334, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.375149] env[68437]: DEBUG nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updated VIF entry in instance network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 917.375577] env[68437]: DEBUG nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [{"id": "28d9f2cf-baaf-4817-acdb-525b41381e45", "address": "fa:16:3e:78:2e:6c", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28d9f2cf-ba", "ovs_interfaceid": "28d9f2cf-baaf-4817-acdb-525b41381e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.481075] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944335, 'name': PowerOffVM_Task, 'duration_secs': 0.237627} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.481367] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.481566] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.481851] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf023c54-f0ad-401e-a9ca-a093db4e8411 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.547588] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.547802] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.548078] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Deleting the datastore file [datastore2] c9d26fd4-f780-4986-8a5f-dea041a70f5d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.548322] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a513b616-b7a3-498d-b895-84b2970190e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.553124] env[68437]: DEBUG nova.network.neutron [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Updated VIF entry in instance network info cache for port 5afaab2f-9ec7-4b17-963e-30e20520a140. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 917.553510] env[68437]: DEBUG nova.network.neutron [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Updating instance_info_cache with network_info: [{"id": "5afaab2f-9ec7-4b17-963e-30e20520a140", "address": "fa:16:3e:c0:59:fe", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5afaab2f-9e", "ovs_interfaceid": "5afaab2f-9ec7-4b17-963e-30e20520a140", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.556344] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for the task: (returnval){ [ 917.556344] env[68437]: value = "task-2944337" [ 917.556344] env[68437]: _type = "Task" [ 917.556344] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.565821] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.667713] env[68437]: DEBUG nova.scheduler.client.report [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.762017] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944333, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597557} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.762570] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e/098010b8-b7f7-4bd1-a42c-7fc4dcaa666e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.763045] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.763248] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a2a7a5a-f78f-4dc9-9d09-1f2eedd3fc47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.771027] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 917.771027] env[68437]: value = "task-2944338" [ 917.771027] env[68437]: _type = "Task" [ 917.771027] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.780210] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944338, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.817677] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.844029} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.818130] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4254002c-d292-4f10-a3d0-387853dbbcb3/4254002c-d292-4f10-a3d0-387853dbbcb3.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.818221] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.819024] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dd0b7a1-873d-4275-ae8e-4262a2a8540a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.827356] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 917.827356] env[68437]: value = "task-2944339" [ 917.827356] env[68437]: _type = "Task" [ 917.827356] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.835505] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944339, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.878635] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Releasing lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.878718] env[68437]: DEBUG nova.compute.manager [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 917.878855] env[68437]: DEBUG nova.compute.manager [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing instance network info cache due to event network-changed-9b76d917-ecbb-45a2-8959-400914c3c584. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 917.879129] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Acquiring lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.879888] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Acquired lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.879888] env[68437]: DEBUG nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Refreshing network info cache for port 9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 918.057547] env[68437]: DEBUG oslo_concurrency.lockutils [req-ea843f4f-0bcd-4d4f-a921-16c92a4983e9 req-4996594d-0d0c-4ebd-aa34-e3e0aee20d5f service nova] Releasing lock "refresh_cache-4254002c-d292-4f10-a3d0-387853dbbcb3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.067788] env[68437]: DEBUG oslo_vmware.api [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Task: {'id': task-2944337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379599} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.067788] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.067848] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.068052] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.068274] env[68437]: INFO nova.compute.manager [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 918.068575] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.068791] env[68437]: DEBUG nova.compute.manager [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.069099] env[68437]: DEBUG nova.network.neutron [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 918.175463] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.176063] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.178642] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.317s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.180480] env[68437]: INFO nova.compute.claims [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.283205] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944338, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081898} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.283581] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.284270] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b3637c-d44e-40f0-a9fa-0ce51724a1bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.311039] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e/098010b8-b7f7-4bd1-a42c-7fc4dcaa666e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.311803] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5541dd9f-6658-439e-b8bd-24aaaf79a49e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.333665] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 918.333665] env[68437]: value = "task-2944340" [ 918.333665] env[68437]: _type = "Task" [ 918.333665] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.335654] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944339, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072355} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.338552] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.340506] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21730ed7-1f84-481e-89eb-a72cf318495a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.346077] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.346291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.351825] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944340, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.370996] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 4254002c-d292-4f10-a3d0-387853dbbcb3/4254002c-d292-4f10-a3d0-387853dbbcb3.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.372198] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8e36dc9-e8da-484b-b5f4-f37f5da21c92 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.394456] env[68437]: DEBUG nova.compute.manager [req-79ee1648-8636-4d09-b9b6-8f850f9cb292 req-d50f579b-6942-4386-a778-7a77b17a45b7 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Received event network-vif-deleted-9b76d917-ecbb-45a2-8959-400914c3c584 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 918.394926] env[68437]: INFO nova.compute.manager [req-79ee1648-8636-4d09-b9b6-8f850f9cb292 req-d50f579b-6942-4386-a778-7a77b17a45b7 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Neutron deleted interface 9b76d917-ecbb-45a2-8959-400914c3c584; detaching it from the instance and deleting it from the info cache [ 918.394926] env[68437]: DEBUG nova.network.neutron [req-79ee1648-8636-4d09-b9b6-8f850f9cb292 req-d50f579b-6942-4386-a778-7a77b17a45b7 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.399705] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 918.399705] env[68437]: value = "task-2944341" [ 918.399705] env[68437]: _type = "Task" [ 918.399705] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.408264] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.409240] env[68437]: INFO nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Port 9b76d917-ecbb-45a2-8959-400914c3c584 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 918.409498] env[68437]: DEBUG nova.network.neutron [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.685802] env[68437]: DEBUG nova.compute.utils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 918.691049] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.691049] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 918.768879] env[68437]: DEBUG nova.policy [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd1ba077acdb424681ab37b017eaff0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '376f884a99d6438aa53e3df5b9c34450', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 918.845782] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944340, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.848337] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 918.890931] env[68437]: DEBUG nova.network.neutron [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.898977] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6159dc43-41ab-44b0-ab79-56289f6770f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.910716] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.912018] env[68437]: DEBUG oslo_concurrency.lockutils [req-429e0d21-ecc3-42df-aeef-5e27ab035c76 req-32e68867-579d-4da2-8127-156d1bc18238 service nova] Releasing lock "refresh_cache-c9d26fd4-f780-4986-8a5f-dea041a70f5d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.915201] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8867200-91ee-4ea5-a822-b00cab3197a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.952709] env[68437]: DEBUG nova.compute.manager [req-79ee1648-8636-4d09-b9b6-8f850f9cb292 req-d50f579b-6942-4386-a778-7a77b17a45b7 service nova] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Detach interface failed, port_id=9b76d917-ecbb-45a2-8959-400914c3c584, reason: Instance c9d26fd4-f780-4986-8a5f-dea041a70f5d could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 919.191113] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.234492] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully created port: 309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.349981] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944340, 'name': ReconfigVM_Task, 'duration_secs': 0.668678} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.350407] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e/098010b8-b7f7-4bd1-a42c-7fc4dcaa666e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.351102] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49bc4c1b-a48a-450a-85fe-8cef5d20477d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.361691] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 919.361691] env[68437]: value = "task-2944342" [ 919.361691] env[68437]: _type = "Task" [ 919.361691] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.371944] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944342, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.373103] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.397938] env[68437]: INFO nova.compute.manager [-] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Took 1.33 seconds to deallocate network for instance. [ 919.414691] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944341, 'name': ReconfigVM_Task, 'duration_secs': 0.626937} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.414993] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 4254002c-d292-4f10-a3d0-387853dbbcb3/4254002c-d292-4f10-a3d0-387853dbbcb3.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.415639] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d90256b8-a2d3-4f6e-8bee-a33a6639849e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.422211] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 919.422211] env[68437]: value = "task-2944343" [ 919.422211] env[68437]: _type = "Task" [ 919.422211] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.434434] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944343, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.606620] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully created port: c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.668136] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d48bd04-5842-40fa-82dd-0aec415da0ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.675892] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef23d39f-ba1b-4954-98e6-0688f8108ea9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.708604] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae058d1-f61d-4e11-808e-e6e3a63bfd2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.716132] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7e2986-ae95-48eb-bb6c-c38c93809877 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.728989] env[68437]: DEBUG nova.compute.provider_tree [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 919.874760] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944342, 'name': Rename_Task, 'duration_secs': 0.148734} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.875251] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.875618] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a51d3225-e50b-4fd0-9c2f-a07d94b635f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.883192] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 919.883192] env[68437]: value = "task-2944344" [ 919.883192] env[68437]: _type = "Task" [ 919.883192] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.894535] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.899015] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully created port: 93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.909378] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.931961] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944343, 'name': Rename_Task, 'duration_secs': 0.136245} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.932268] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.932495] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7cfb8870-868c-4e88-9363-9d46a3cf6a92 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.937921] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 919.937921] env[68437]: value = "task-2944345" [ 919.937921] env[68437]: _type = "Task" [ 919.937921] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.945241] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.212608] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.232973] env[68437]: DEBUG nova.scheduler.client.report [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.242362] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.242362] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.242552] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.242753] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.242845] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.242962] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.243319] env[68437]: DEBUG nova.virt.hardware [None 
req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.243394] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.243585] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.243706] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.243910] env[68437]: DEBUG nova.virt.hardware [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.244953] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf7946a-0cb8-4c90-9c80-34649861f403 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.253090] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63fde322-6fcc-46bb-a22b-991459177127 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.394577] env[68437]: DEBUG oslo_vmware.api [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944344, 'name': PowerOnVM_Task, 'duration_secs': 0.504272} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.394907] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.395097] env[68437]: INFO nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 9.93 seconds to spawn the instance on the hypervisor. 
[ 920.395537] env[68437]: DEBUG nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.396115] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fb2389-f051-40c7-aa91-032b9690ac04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.448515] env[68437]: DEBUG oslo_vmware.api [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944345, 'name': PowerOnVM_Task, 'duration_secs': 0.445762} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.449551] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.449735] env[68437]: INFO nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Took 7.27 seconds to spawn the instance on the hypervisor. [ 920.449914] env[68437]: DEBUG nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.450731] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b766ef-7008-4cce-ace1-40404a33b381 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.738047] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.738412] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 920.741544] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 35.315s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.741785] env[68437]: DEBUG nova.objects.instance [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 920.915782] env[68437]: INFO nova.compute.manager [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 48.79 seconds to build instance. [ 920.970697] env[68437]: INFO nova.compute.manager [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Took 46.58 seconds to build instance. [ 921.243251] env[68437]: DEBUG nova.compute.utils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 921.244615] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Not allocating networking since 'none' was specified. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 921.401111] env[68437]: DEBUG nova.compute.manager [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-plugged-309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 921.401386] env[68437]: DEBUG oslo_concurrency.lockutils [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.401667] env[68437]: DEBUG oslo_concurrency.lockutils [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.401839] env[68437]: DEBUG oslo_concurrency.lockutils [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.401990] env[68437]: DEBUG nova.compute.manager [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] No waiting events found dispatching network-vif-plugged-309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.402248] env[68437]: WARNING nova.compute.manager [req-6057065c-c8bc-481d-8b6d-b439eb535d55 req-0081f33f-7362-4768-8c52-7b127d8e83a9 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received unexpected event network-vif-plugged-309987c8-0aed-4755-a8e0-b6416865aff4 for instance with vm_state building and task_state spawning. 
[ 921.417008] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6bce507d-5376-4c5a-8557-c77bad89801e tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.169s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.472854] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d707d289-51d6-4bcc-bee5-e1d4c0d42c20 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.863s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.591401] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully updated port: 309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.749009] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 921.753308] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0f62f476-6e90-490c-9da7-da8e0c3f8a00 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.754682] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 32.206s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.259308] env[68437]: INFO nova.compute.claims [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.768815] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 922.773694] env[68437]: INFO nova.compute.resource_tracker [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating resource usage from migration b17182b5-43cf-4749-8362-d1a3df183f00 [ 922.798592] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 922.799059] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 922.800025] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 922.800025] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 922.800025] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 922.800025] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 922.800206] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 922.800390] env[68437]: DEBUG nova.virt.hardware 
[None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 922.800610] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 922.800861] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 922.801058] env[68437]: DEBUG nova.virt.hardware [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 922.801978] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b3b69a-0e64-4da4-a04b-54da73b6157e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.814713] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7371f838-68f7-46a8-9567-20b23971e47f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.831111] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.838437] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Creating folder: Project (0ceabb7c93dc44a29ed0b8bcaa25e4f3). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.841694] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5e647ef-2c7e-4004-99d5-ff1e0f71c95f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.852541] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Created folder: Project (0ceabb7c93dc44a29ed0b8bcaa25e4f3) in parent group-v590848. [ 922.852725] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Creating folder: Instances. Parent ref: group-v591013. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.852973] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59c3f92f-72e2-45b1-a47f-7caf6be9687a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.861460] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Created folder: Instances in parent group-v591013. [ 922.861721] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.861931] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.862148] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ef97adb-fb90-4b1a-8ce9-f71e9ff56a8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.883019] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.883019] env[68437]: value = "task-2944348" [ 922.883019] env[68437]: _type = "Task" [ 922.883019] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.895278] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944348, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.146199] env[68437]: DEBUG nova.compute.manager [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.147213] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee81fb2f-9c45-4584-a07a-f24685b605e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.267295] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f270f570-6ea7-44c8-aea4-9566eb4f0583 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.275313] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d12489-ac62-4ec6-8411-68a52ecda929 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.306483] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb992fc5-baeb-4698-af38-0b7e4324138e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.314235] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d947359-4e36-47f3-bf21-406c948ca531 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.327981] env[68437]: DEBUG nova.compute.provider_tree [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.392255] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944348, 'name': CreateVM_Task, 'duration_secs': 0.297815} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.392435] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.392867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.393044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.393378] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 923.393632] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59169f9d-8b5c-4e5b-b0b5-69637d277bc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.398278] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 923.398278] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5283fd70-1479-adbc-7123-73c91fe1d2ed" [ 923.398278] env[68437]: _type = "Task" [ 923.398278] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.406217] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5283fd70-1479-adbc-7123-73c91fe1d2ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.444844] env[68437]: DEBUG nova.compute.manager [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-changed-309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 923.445105] env[68437]: DEBUG nova.compute.manager [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing instance network info cache due to event network-changed-309987c8-0aed-4755-a8e0-b6416865aff4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 923.445262] env[68437]: DEBUG oslo_concurrency.lockutils [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] Acquiring lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.445396] env[68437]: DEBUG oslo_concurrency.lockutils [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] Acquired lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.445557] env[68437]: DEBUG nova.network.neutron [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing network info cache for port 309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 923.660397] env[68437]: INFO nova.compute.manager [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] instance snapshotting [ 923.663289] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84da65-8973-4ae2-8feb-a5bf2525459b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.683137] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97f1c75-aa5d-4f19-80fb-4184c053ee00 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.727611] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully updated port: c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 923.831345] env[68437]: DEBUG nova.scheduler.client.report [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.908214] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5283fd70-1479-adbc-7123-73c91fe1d2ed, 'name': SearchDatastore_Task, 'duration_secs': 0.036119} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.908537] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.908816] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.909091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.909248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.909428] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.909695] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-769f12e0-2b6a-4355-b948-bc982cc09439 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.918321] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.918501] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.919245] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8997d32d-1b67-4600-a4a8-8cd18af1d0cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.924386] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 923.924386] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521c6652-e100-a958-6718-9d2edf2ea99f" [ 923.924386] env[68437]: _type = "Task" [ 923.924386] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.931798] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521c6652-e100-a958-6718-9d2edf2ea99f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.991415] env[68437]: DEBUG nova.network.neutron [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 924.079335] env[68437]: DEBUG nova.network.neutron [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.191992] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 924.192280] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-33b655aa-c206-4e60-9b87-f9250669ff96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.199870] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 924.199870] env[68437]: value = "task-2944349" [ 924.199870] env[68437]: _type = "Task" [ 924.199870] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.207386] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944349, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.336994] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.582s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.337361] env[68437]: INFO nova.compute.manager [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Migrating [ 924.343936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.903s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.344130] env[68437]: DEBUG nova.objects.instance [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 924.437484] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521c6652-e100-a958-6718-9d2edf2ea99f, 'name': SearchDatastore_Task, 'duration_secs': 0.011966} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.438597] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e014eea7-851b-4716-a034-75bb3d4d72ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.444901] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 924.444901] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526bbe64-921c-67c6-f8a0-7d08fc354cd7" [ 924.444901] env[68437]: _type = "Task" [ 924.444901] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.455618] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526bbe64-921c-67c6-f8a0-7d08fc354cd7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.581804] env[68437]: DEBUG oslo_concurrency.lockutils [req-17dd17d8-970b-4ae5-9177-3d7a5ca6b6c5 req-cde15fd0-d580-4b97-8f55-66bbedb98f88 service nova] Releasing lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.710999] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944349, 'name': CreateSnapshot_Task, 'duration_secs': 0.474519} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.711295] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 924.712045] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e6134ac-ff83-41f5-a6bd-5d9074cbb300 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.853794] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.854066] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.854157] env[68437]: DEBUG nova.network.neutron [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 924.955965] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526bbe64-921c-67c6-f8a0-7d08fc354cd7, 'name': SearchDatastore_Task, 'duration_secs': 0.009606} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.956252] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.956624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee/6d09b9e8-f701-4548-8ec3-c1d9e69223ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.957014] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-993a6b80-f4d0-408c-89ff-6947ae94920a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.966606] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 924.966606] env[68437]: value = "task-2944350" [ 924.966606] env[68437]: _type = "Task" [ 924.966606] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.974174] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944350, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.229835] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 925.229835] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9d84380f-e5b0-4a07-8148-56790a487964 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.241499] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 925.241499] env[68437]: value = "task-2944351" [ 925.241499] env[68437]: _type = "Task" [ 925.241499] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.251083] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944351, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.353953] env[68437]: DEBUG oslo_concurrency.lockutils [None req-daa794b5-ba3b-4375-b566-91781929e616 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.355130] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.605s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.355412] env[68437]: DEBUG nova.objects.instance [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lazy-loading 'resources' on Instance uuid 892bf198-7d05-4995-8137-c57095c5c839 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.475805] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944350, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503317} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.476108] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee/6d09b9e8-f701-4548-8ec3-c1d9e69223ee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.476330] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.476623] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e78ac609-e0ec-4c09-8b92-d670f587cc15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.482942] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 925.482942] env[68437]: value = "task-2944352" [ 925.482942] env[68437]: _type = "Task" [ 925.482942] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.485391] env[68437]: DEBUG nova.compute.manager [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-plugged-c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 925.485738] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.486007] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.486297] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.486589] env[68437]: DEBUG nova.compute.manager [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] No waiting events found dispatching 
network-vif-plugged-c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 925.486875] env[68437]: WARNING nova.compute.manager [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received unexpected event network-vif-plugged-c38e66ad-bd88-409c-812a-3fbbee675bfa for instance with vm_state building and task_state spawning. [ 925.487184] env[68437]: DEBUG nova.compute.manager [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-changed-c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 925.487510] env[68437]: DEBUG nova.compute.manager [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing instance network info cache due to event network-changed-c38e66ad-bd88-409c-812a-3fbbee675bfa. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 925.488396] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Acquiring lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.488396] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Acquired lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.488396] env[68437]: DEBUG nova.network.neutron [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing network info cache for port c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 925.502194] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944352, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.593540] env[68437]: DEBUG nova.network.neutron [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [{"id": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "address": "fa:16:3e:87:d9:eb", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba0a9a2-70", "ovs_interfaceid": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.752648] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944351, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.900007] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Successfully updated port: 93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.995020] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073918} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.995020] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.995467] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8988a605-124e-4e07-a6b5-7055a285a3eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.017649] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee/6d09b9e8-f701-4548-8ec3-c1d9e69223ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.020530] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-287f8648-8c9a-4a65-9c89-cbc5f1f36ca0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.041637] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 926.041637] env[68437]: value = "task-2944353" [ 926.041637] env[68437]: _type = "Task" [ 926.041637] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.045397] env[68437]: DEBUG nova.network.neutron [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 926.056132] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944353, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.096528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.200535] env[68437]: DEBUG nova.network.neutron [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.252651] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944351, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.339316] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00343b9f-4dac-4dbb-83dc-a7ef1b7fc2b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.344572] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bdab43-d96c-4b65-b67c-17320a2de771 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.374333] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47992eb-6475-488f-8639-93227c030dcd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.381549] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eccd42-0b96-4537-9ba3-47691af00b36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.395552] env[68437]: DEBUG nova.compute.provider_tree [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.403437] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.551611] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944353, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.706958] env[68437]: DEBUG oslo_concurrency.lockutils [req-c72ded97-435f-4bc2-abcf-91282b33d41a req-76becce9-5444-4a10-90d0-233589a04029 service nova] Releasing lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.706958] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.706958] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 926.755074] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944351, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.899052] env[68437]: DEBUG nova.scheduler.client.report [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.052381] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944353, 'name': ReconfigVM_Task, 'duration_secs': 0.955498} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.052670] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee/6d09b9e8-f701-4548-8ec3-c1d9e69223ee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.053366] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e99b880e-aa6e-49c9-a412-dc3732e80ca1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.059504] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 927.059504] env[68437]: value = "task-2944354" [ 927.059504] env[68437]: _type = "Task" [ 927.059504] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.067410] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944354, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.255942] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944351, 'name': CloneVM_Task, 'duration_secs': 1.778466} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.256386] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Created linked-clone VM from snapshot [ 927.257186] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5410ee34-6a21-45cf-8766-2e5d97c80e11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.264843] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Uploading image b4cab361-c60e-4026-b20f-56f65663461e {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 927.267448] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 927.290115] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 927.290115] env[68437]: value = "vm-591017" [ 927.290115] env[68437]: _type = "VirtualMachine" [ 927.290115] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 927.290425] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-da594a75-fdda-40c4-9ef7-22faff8d2075 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.298364] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease: (returnval){ [ 927.298364] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52893f0b-3860-0f7e-0432-3a378ab7be45" [ 927.298364] env[68437]: _type = "HttpNfcLease" [ 927.298364] env[68437]: } obtained for exporting VM: (result){ [ 927.298364] env[68437]: value = "vm-591017" [ 927.298364] env[68437]: _type = "VirtualMachine" [ 927.298364] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 927.298597] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the lease: (returnval){ [ 927.298597] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52893f0b-3860-0f7e-0432-3a378ab7be45" [ 927.298597] env[68437]: _type = "HttpNfcLease" [ 927.298597] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 927.304726] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 927.304726] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52893f0b-3860-0f7e-0432-3a378ab7be45" [ 927.304726] env[68437]: _type = "HttpNfcLease" [ 927.304726] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 927.408607] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.053s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.414045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.603s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.414313] env[68437]: DEBUG nova.objects.instance [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lazy-loading 'resources' on Instance uuid ad773afa-fc0a-4380-901d-af013ce55f2b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.437516] env[68437]: INFO nova.scheduler.client.report [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Deleted allocations for instance 892bf198-7d05-4995-8137-c57095c5c839 [ 927.570473] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944354, 'name': Rename_Task, 'duration_secs': 0.136735} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.570748] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.571033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c87d041f-0500-4cfd-ba97-43e90498ec09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.578515] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 927.578515] env[68437]: value = "task-2944356" [ 927.578515] env[68437]: _type = "Task" [ 927.578515] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.590858] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944356, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.613954] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563cb2e1-29a3-43fd-b8a4-8c2e97dbf0e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.633201] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.728467] env[68437]: DEBUG nova.compute.manager [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-plugged-93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 927.729473] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.729473] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.729473] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.729473] env[68437]: DEBUG nova.compute.manager [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] No waiting events found dispatching network-vif-plugged-93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.729473] env[68437]: WARNING nova.compute.manager [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received unexpected event network-vif-plugged-93185981-83ed-4eb0-b83c-0ca6e684d267 for instance with vm_state building and task_state spawning. 
[ 927.729722] env[68437]: DEBUG nova.compute.manager [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-changed-93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 927.729722] env[68437]: DEBUG nova.compute.manager [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing instance network info cache due to event network-changed-93185981-83ed-4eb0-b83c-0ca6e684d267. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 927.729781] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Acquiring lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.811939] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 927.811939] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52893f0b-3860-0f7e-0432-3a378ab7be45" [ 927.811939] env[68437]: _type = "HttpNfcLease" [ 927.811939] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 927.812282] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 927.812282] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52893f0b-3860-0f7e-0432-3a378ab7be45" [ 927.812282] env[68437]: _type = "HttpNfcLease" [ 927.812282] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 927.813097] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc98456-867b-489a-a67e-67bb0292a4c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.821478] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 927.821889] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk for reading. 
{{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 927.924445] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-91efeb06-ffd4-4a47-8796-00ce137e40bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.946512] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff07b650-bba4-4f8f-8872-18bfceee4617 tempest-ServerShowV254Test-751247983 tempest-ServerShowV254Test-751247983-project-member] Lock "892bf198-7d05-4995-8137-c57095c5c839" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.524s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.092947] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944356, 'name': PowerOnVM_Task} progress is 81%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.141941] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.142558] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99be04d9-d85f-407d-9957-8982004596ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.153679] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 928.153679] env[68437]: value = "task-2944357" [ 928.153679] env[68437]: _type = "Task" [ 928.153679] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.168952] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944357, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.240455] env[68437]: DEBUG nova.network.neutron [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [{"id": "309987c8-0aed-4755-a8e0-b6416865aff4", "address": "fa:16:3e:ca:dc:ee", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap309987c8-0a", "ovs_interfaceid": "309987c8-0aed-4755-a8e0-b6416865aff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "address": "fa:16:3e:b3:1c:32", "network": {"id": "213da79d-a765-461c-b6d7-6b9e54eaed18", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1557044246", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38e66ad-bd", "ovs_interfaceid": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "93185981-83ed-4eb0-b83c-0ca6e684d267", "address": "fa:16:3e:3e:8a:00", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", 
"external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93185981-83", "ovs_interfaceid": "93185981-83ed-4eb0-b83c-0ca6e684d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.472729] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2e9298-53f9-480e-83fc-e728a2a5af2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.481943] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d2af3e-781d-4dc4-ba07-ec5f6524e863 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.514738] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825e3691-7b6b-4e48-8b44-255c6d0a2ba7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.523688] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0267a0-34d0-451d-9cde-161467f4a743 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.540653] env[68437]: DEBUG nova.compute.provider_tree [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.592667] env[68437]: DEBUG oslo_vmware.api [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944356, 'name': PowerOnVM_Task, 'duration_secs': 0.710483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.593386] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.593499] env[68437]: INFO nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Took 5.82 seconds to spawn the instance on the hypervisor. 
[ 928.593705] env[68437]: DEBUG nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.594527] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485783f3-3315-4243-b803-0c01dd3f507e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.665146] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944357, 'name': PowerOffVM_Task, 'duration_secs': 0.234945} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.665496] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.665679] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 928.747203] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.747639] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance network_info: |[{"id": "309987c8-0aed-4755-a8e0-b6416865aff4", "address": "fa:16:3e:ca:dc:ee", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap309987c8-0a", "ovs_interfaceid": "309987c8-0aed-4755-a8e0-b6416865aff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "address": "fa:16:3e:b3:1c:32", "network": {"id": "213da79d-a765-461c-b6d7-6b9e54eaed18", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1557044246", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38e66ad-bd", "ovs_interfaceid": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "93185981-83ed-4eb0-b83c-0ca6e684d267", "address": "fa:16:3e:3e:8a:00", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93185981-83", "ovs_interfaceid": "93185981-83ed-4eb0-b83c-0ca6e684d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 928.748035] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Acquired lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.748330] env[68437]: DEBUG nova.network.neutron [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Refreshing network info cache for port 93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 928.749642] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:dc:ee', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '309987c8-0aed-4755-a8e0-b6416865aff4', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:1c:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c38e66ad-bd88-409c-812a-3fbbee675bfa', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:8a:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93185981-83ed-4eb0-b83c-0ca6e684d267', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.762313] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Creating folder: Project (376f884a99d6438aa53e3df5b9c34450). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.767324] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae106dc2-a54d-440f-a7fb-88ade903c8e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.782423] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Created folder: Project (376f884a99d6438aa53e3df5b9c34450) in parent group-v590848. [ 928.783042] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Creating folder: Instances. Parent ref: group-v591018. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 928.783392] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-206a3b55-7fa7-48d7-b293-403e3702c4eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.798479] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Created folder: Instances in parent group-v591018. [ 928.801017] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.801017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.801017] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84905ee8-bb40-41d4-8349-f1124bfbc4c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.831314] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.831314] env[68437]: value = "task-2944360" [ 928.831314] env[68437]: _type = "Task" [ 928.831314] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.841333] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944360, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.044172] env[68437]: DEBUG nova.scheduler.client.report [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.097466] env[68437]: DEBUG nova.network.neutron [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updated VIF entry in instance network info cache for port 93185981-83ed-4eb0-b83c-0ca6e684d267. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 929.097945] env[68437]: DEBUG nova.network.neutron [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [{"id": "309987c8-0aed-4755-a8e0-b6416865aff4", "address": "fa:16:3e:ca:dc:ee", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap309987c8-0a", "ovs_interfaceid": "309987c8-0aed-4755-a8e0-b6416865aff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "address": "fa:16:3e:b3:1c:32", "network": {"id": "213da79d-a765-461c-b6d7-6b9e54eaed18", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1557044246", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38e66ad-bd", "ovs_interfaceid": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "93185981-83ed-4eb0-b83c-0ca6e684d267", "address": "fa:16:3e:3e:8a:00", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", 
"segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93185981-83", "ovs_interfaceid": "93185981-83ed-4eb0-b83c-0ca6e684d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.113214] env[68437]: INFO nova.compute.manager [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Took 46.27 seconds to build instance. [ 929.172180] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.172584] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.172772] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.172960] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.173132] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.173407] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 929.173694] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.173868] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 929.174054] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.174226] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.174400] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.179629] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-855149b1-e5fa-45eb-b197-8b37022c525a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.196984] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 929.196984] env[68437]: value = "task-2944361" [ 929.196984] env[68437]: _type = "Task" [ 929.196984] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.209136] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944361, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.342166] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944360, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.550200] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.136s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.553340] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.544s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.553340] env[68437]: DEBUG nova.objects.instance [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 929.575781] env[68437]: INFO nova.scheduler.client.report [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Deleted allocations for instance ad773afa-fc0a-4380-901d-af013ce55f2b [ 929.602884] env[68437]: DEBUG oslo_concurrency.lockutils [req-bcb5645b-9ea6-45ee-bb12-715e9fa5df14 req-026e42c9-686f-4529-a834-0f331d2e227e service nova] Releasing lock "refresh_cache-221fcaf9-e17a-4594-90be-9dd49e7df424" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.615676] env[68437]: DEBUG oslo_concurrency.lockutils [None req-499dac92-c15e-4a8e-9879-393d7e1a6db1 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.790s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.711109] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944361, 'name': ReconfigVM_Task, 'duration_secs': 0.247142} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.711506] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 929.842774] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944360, 'name': CreateVM_Task, 'duration_secs': 0.722697} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.842976] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.843845] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.844021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.844346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 929.844608] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d86f3f23-5b55-4f63-ae00-8ab75e6385af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.850332] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 929.850332] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524e546c-6945-0f10-6a66-a852a573e7b2" [ 929.850332] env[68437]: _type = "Task" [ 929.850332] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.859513] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524e546c-6945-0f10-6a66-a852a573e7b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.083823] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c6914c1a-9dc5-4a8a-a960-b54a0d8c2649 tempest-ServerRescueTestJSON-1765231545 tempest-ServerRescueTestJSON-1765231545-project-member] Lock "ad773afa-fc0a-4380-901d-af013ce55f2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.655s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.217536] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 930.217917] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.218014] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 930.218127] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.218274] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 930.218419] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 930.218825] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 930.219045] env[68437]: DEBUG 
nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 930.219226] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 930.219389] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 930.219562] env[68437]: DEBUG nova.virt.hardware [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 930.225879] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 930.226206] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bbfc629-24fc-4727-a771-ce8619caa245 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.248333] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 930.248333] env[68437]: value = "task-2944362" [ 930.248333] env[68437]: _type = "Task" [ 930.248333] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.256970] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944362, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.305907] env[68437]: DEBUG nova.compute.manager [None req-a8ba1591-853a-4e98-b08c-db992fd1594b tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.307106] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cfb1ab6-afd0-4c70-ad7c-dec3475e3925 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.361079] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524e546c-6945-0f10-6a66-a852a573e7b2, 'name': SearchDatastore_Task, 'duration_secs': 0.01591} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.361426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.361664] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.361896] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.362097] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.362299] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.362574] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcb92abf-7e9b-490b-aae4-df6f7761a093 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.374172] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a 
tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.374374] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.375135] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ae0374-fa39-4b35-aad1-8824a8e69b8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.381341] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 930.381341] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5278b31f-b187-05e9-f9ed-3e16e26892de" [ 930.381341] env[68437]: _type = "Task" [ 930.381341] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.391263] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5278b31f-b187-05e9-f9ed-3e16e26892de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.489099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.489370] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.489582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.489763] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.489976] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.492491] env[68437]: INFO nova.compute.manager [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Terminating instance [ 930.562273] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d86861e-3936-4066-804f-7f4cd04c369a tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.563508] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.332s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.564863] env[68437]: INFO nova.compute.claims [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.759407] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944362, 'name': ReconfigVM_Task, 'duration_secs': 0.190143} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.759710] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 930.760552] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b856520-9412-4027-b4e5-fdfd14d0186b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.783947] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.784334] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-582ef660-85b2-4f34-bb57-e80902beb3e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.806021] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 930.806021] env[68437]: value = "task-2944363" [ 930.806021] env[68437]: _type = "Task" [ 930.806021] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.813027] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944363, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.818894] env[68437]: INFO nova.compute.manager [None req-a8ba1591-853a-4e98-b08c-db992fd1594b tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] instance snapshotting [ 930.819543] env[68437]: DEBUG nova.objects.instance [None req-a8ba1591-853a-4e98-b08c-db992fd1594b tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lazy-loading 'flavor' on Instance uuid 6d09b9e8-f701-4548-8ec3-c1d9e69223ee {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.895035] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5278b31f-b187-05e9-f9ed-3e16e26892de, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.896236] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2edebea-41f7-482f-bb2e-4ec182753ae5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.907242] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 930.907242] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520e13f1-7d44-84fc-0b17-faa809bc721e" [ 930.907242] env[68437]: _type = "Task" [ 930.907242] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.917569] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e13f1-7d44-84fc-0b17-faa809bc721e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.996253] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "refresh_cache-6d09b9e8-f701-4548-8ec3-c1d9e69223ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.996451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquired lock "refresh_cache-6d09b9e8-f701-4548-8ec3-c1d9e69223ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.996630] env[68437]: DEBUG nova.network.neutron [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 931.316217] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944363, 'name': ReconfigVM_Task, 'duration_secs': 0.304686} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.321225] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed/6d877579-3095-4ee9-bb3e-4d5a9122f1ed.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.321225] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 931.325365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d77af2-0877-4cf1-a0cd-205a5155b551 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.346380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609bb90f-9595-4183-92c3-2aab343c3cb4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.427710] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520e13f1-7d44-84fc-0b17-faa809bc721e, 'name': SearchDatastore_Task, 'duration_secs': 0.017311} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.431045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.431045] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 221fcaf9-e17a-4594-90be-9dd49e7df424/221fcaf9-e17a-4594-90be-9dd49e7df424.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.431045] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-234aa7ba-beb1-4299-b0de-765a6baa2b6d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.440369] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 931.440369] env[68437]: value = "task-2944364" [ 931.440369] env[68437]: _type = "Task" [ 931.440369] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.450830] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.528534] env[68437]: DEBUG nova.network.neutron [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 931.586344] env[68437]: DEBUG nova.network.neutron [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.827512] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520cfe73-c0d8-4b27-810f-12df748d2673 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.853414] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e617b8-5f0a-4923-9065-91ca5fdd9407 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.858018] env[68437]: DEBUG nova.compute.manager [None req-a8ba1591-853a-4e98-b08c-db992fd1594b tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance disappeared during snapshot {{(pid=68437) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 931.874143] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 931.951226] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944364, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.031800] env[68437]: DEBUG nova.compute.manager [None req-a8ba1591-853a-4e98-b08c-db992fd1594b tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Found 0 images (rotation: 2) {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 932.081420] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d00033-a33e-446f-ac5a-5c014e64d4a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.090076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Releasing lock "refresh_cache-6d09b9e8-f701-4548-8ec3-c1d9e69223ee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.090568] env[68437]: DEBUG nova.compute.manager [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.090796] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 932.092210] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8e0c10-5f66-469e-b82d-bebfa0524c10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.096149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb8fd42-e526-4cad-ab86-8b01a549a3cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.104609] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.131719] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d29a3dc-4436-45aa-9afb-c203f192102f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.134767] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb2ed8d-373e-4def-8835-f9b299721116 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.143197] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e978ca4-5da5-4f84-ae15-5d2588f823f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.148649] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 932.148649] env[68437]: value = "task-2944365" [ 932.148649] env[68437]: _type = "Task" [ 932.148649] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.161225] env[68437]: DEBUG nova.compute.provider_tree [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.168236] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944365, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.436390] env[68437]: DEBUG nova.network.neutron [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Port bba0a9a2-7033-420f-baf6-f59f37b8b8b8 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 932.451368] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639056} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.452032] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 221fcaf9-e17a-4594-90be-9dd49e7df424/221fcaf9-e17a-4594-90be-9dd49e7df424.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.452032] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.452278] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b362aa17-94a3-48f5-a7a2-37dc5bb8a078 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.461704] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 932.461704] env[68437]: value = "task-2944366" [ 932.461704] env[68437]: _type = "Task" [ 932.461704] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.474402] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944366, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.660931] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944365, 'name': PowerOffVM_Task, 'duration_secs': 0.170422} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.661249] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.661429] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 932.661686] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a347a59d-298e-4792-b91c-b4100afdd9df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.663867] env[68437]: DEBUG nova.scheduler.client.report [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.696699] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 932.696943] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 932.697161] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Deleting the datastore file [datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 932.698031] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e10fe4c-6e73-432d-a4c0-26022839504d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.706623] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for the task: (returnval){ [ 932.706623] env[68437]: value = "task-2944368" [ 932.706623] env[68437]: _type = "Task" [ 932.706623] 
env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.718732] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944368, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.973425] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087259} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.973761] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.974694] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236941fb-e694-474e-b6b2-d6fdba05b908 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.002672] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 221fcaf9-e17a-4594-90be-9dd49e7df424/221fcaf9-e17a-4594-90be-9dd49e7df424.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.003850] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7295b2b7-91ed-41cb-aaad-01c88e641c60 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.027065] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 933.027065] env[68437]: value = "task-2944369" [ 933.027065] env[68437]: _type = "Task" [ 933.027065] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.037122] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944369, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.169412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.170097] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 933.173247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.823s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.174919] env[68437]: INFO nova.compute.claims [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.218862] env[68437]: DEBUG oslo_vmware.api [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Task: {'id': task-2944368, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141863} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.219171] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 933.219367] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 933.219574] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 933.219753] env[68437]: INFO nova.compute.manager [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Took 1.13 seconds to destroy the instance on the hypervisor. 
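Note: the records above trace the hypervisor-side teardown of instance 6d09b9e8-f701-4548-8ec3-c1d9e69223ee: power off the VM (task-2944365), unregister it, then delete its datastore directory (task-2944368), with _poll_task looping on each task until it completes. The following is a minimal, hedged Python sketch of that sequence; `session.call` and `session.get_task_info` are assumed stand-ins for a vSphere session wrapper (oslo.vmware provides comparable helpers), not the actual nova.virt.vmwareapi code.

```python
import time


def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vSphere task until it finishes (the _poll_task lines above)."""
    while True:
        info = session.get_task_info(task_ref)   # assumed helper on the wrapper
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError("task %s failed: %s" % (task_ref, info.error))
        time.sleep(poll_interval)                # progress is logged between polls


def destroy_on_hypervisor(session, vm_ref, file_manager, datacenter, instance_dir):
    """Power off, unregister, and delete the instance directory, as traced above."""
    # PowerOffVM_Task (task-2944365 in this log)
    wait_for_task(session, session.call(vm_ref, "PowerOffVM_Task"))
    # UnregisterVM is synchronous; the log shows no task for it
    session.call(vm_ref, "UnregisterVM")
    # DeleteDatastoreFile_Task (task-2944368), e.g. instance_dir =
    # "[datastore1] 6d09b9e8-f701-4548-8ec3-c1d9e69223ee"
    wait_for_task(session, session.call(
        file_manager, "DeleteDatastoreFile_Task",
        name=instance_dir, datacenter=datacenter))
```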
[ 933.220025] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.220237] env[68437]: DEBUG nova.compute.manager [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 933.220327] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 933.245839] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 933.473788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.474282] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.474282] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.539977] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944369, 'name': ReconfigVM_Task, 'duration_secs': 0.459775} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.541662] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 221fcaf9-e17a-4594-90be-9dd49e7df424/221fcaf9-e17a-4594-90be-9dd49e7df424.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.541662] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad6e18c7-4c89-4fe6-ac27-a22ec7b08fe4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.550358] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 933.550358] env[68437]: value = "task-2944370" [ 933.550358] env[68437]: _type = "Task" [ 933.550358] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.560056] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944370, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.681099] env[68437]: DEBUG nova.compute.utils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 933.685106] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 933.685414] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 933.748412] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.771188] env[68437]: DEBUG nova.policy [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '788235624af44d4a8ce015e07b50fb08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38eeb3b94c7343edace03412d5c20cac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 934.059985] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Successfully created port: aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 934.067639] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944370, 'name': Rename_Task, 'duration_secs': 0.232105} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.067983] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.068306] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39a594bc-65aa-4a3c-ab43-b7a1616f7a4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.076205] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 934.076205] env[68437]: value = "task-2944371" [ 934.076205] env[68437]: _type = "Task" [ 934.076205] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.087278] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944371, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.185721] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 934.251820] env[68437]: INFO nova.compute.manager [-] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Took 1.03 seconds to deallocate network for instance. [ 934.538422] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.538735] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.538768] env[68437]: DEBUG nova.network.neutron [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 934.587519] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944371, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.639264] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99180e2-4e59-46a9-91ac-13eee2ffb30b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.648435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccbebd6-51d5-4ef2-9de1-87098c527ba0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.686969] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d8ac21-13a5-4853-84b4-cc34637e3a79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.702355] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9aa8c4-59c4-44b3-a579-8b403d92da78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.718069] env[68437]: DEBUG nova.compute.provider_tree [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.758560] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.088469] env[68437]: DEBUG oslo_vmware.api [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944371, 'name': PowerOnVM_Task, 'duration_secs': 0.677664} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.088795] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.089044] env[68437]: INFO nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Took 14.88 seconds to spawn the instance on the hypervisor. 
[ 935.089287] env[68437]: DEBUG nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.090091] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feb7251-18ce-4803-8944-1eedb071dcc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.210772] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 935.220790] env[68437]: DEBUG nova.scheduler.client.report [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.231511] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 935.231762] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 935.231891] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 935.232079] env[68437]: DEBUG nova.virt.hardware [None 
req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 935.232232] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 935.232378] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 935.232588] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 935.232752] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 935.232994] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 935.233098] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 935.233307] env[68437]: DEBUG nova.virt.hardware [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 935.234226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c16893-0420-4bc6-ae9e-cfa1e257a746 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.243298] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d418944a-49c2-4f7a-abd6-f942defc9c7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.248045] env[68437]: DEBUG nova.network.neutron [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with 
network_info: [{"id": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "address": "fa:16:3e:87:d9:eb", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba0a9a2-70", "ovs_interfaceid": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.544143] env[68437]: DEBUG nova.compute.manager [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Received event network-vif-plugged-aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 935.544143] env[68437]: DEBUG oslo_concurrency.lockutils [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] Acquiring lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.544143] env[68437]: DEBUG oslo_concurrency.lockutils [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.544143] env[68437]: DEBUG oslo_concurrency.lockutils [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.544143] env[68437]: DEBUG nova.compute.manager [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] No waiting events found dispatching network-vif-plugged-aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 935.544143] env[68437]: WARNING nova.compute.manager [req-66aeab69-eef3-48a0-8150-1b6163438858 req-23ab837e-2e03-4a8f-a946-2601114ca331 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Received unexpected event network-vif-plugged-aa3bcc67-cd90-4ea4-8af0-656c009a4631 for instance with vm_state 
building and task_state spawning. [ 935.609670] env[68437]: INFO nova.compute.manager [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Took 55.96 seconds to build instance. [ 935.644206] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Successfully updated port: aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.734020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.734020] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.735443] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 35.990s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.751280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.113648] env[68437]: DEBUG oslo_concurrency.lockutils [None req-423bbf00-a485-4d9d-bff8-f0c4fafceb6a tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.080s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.150031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.150326] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquired lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.150551] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 936.241395] env[68437]: DEBUG nova.compute.utils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 936.243231] env[68437]: DEBUG nova.objects.instance [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lazy-loading 'migration_context' on Instance uuid b7706bf2-936f-439c-8e9f-b2241d0c211c {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.244443] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 936.244691] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 936.277926] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7781b026-09a2-4e23-8cee-08dd87b3a010 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.282366] env[68437]: DEBUG nova.policy [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42280bc8e492437aa17259ace66e1601', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18b5eecfb2734eaf8288932f146e3d5e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 936.299980] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5765dc-d148-4bb9-b7d4-a4a4d895d7e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.307749] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 83 
{{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 936.394947] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.395269] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.395491] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.395673] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.395843] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.397967] env[68437]: INFO nova.compute.manager [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Terminating instance [ 936.616838] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Successfully created port: 5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.685090] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 936.745451] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.814835] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.815205] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5298ab6-548b-4d71-ac22-49b7fbd5d35d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.822853] env[68437]: DEBUG nova.network.neutron [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Updating instance_info_cache with network_info: [{"id": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "address": "fa:16:3e:b3:dc:98", "network": {"id": "0cb69e17-3aed-4255-aff8-30546c26c0b9", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1991544982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38eeb3b94c7343edace03412d5c20cac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa3bcc67-cd", "ovs_interfaceid": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.826345] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 936.826345] env[68437]: value = "task-2944372" [ 936.826345] env[68437]: _type = "Task" [ 936.826345] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.839525] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.901542] env[68437]: DEBUG nova.compute.manager [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.904016] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.904016] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ed058c-30c8-4450-ab61-c571dd8559b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.913980] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.914394] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcfc9f3b-d106-49b2-ae14-50b702f31d5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.923026] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 936.923026] env[68437]: value = "task-2944373" [ 936.923026] env[68437]: _type = "Task" [ 936.923026] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.932943] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944373, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.201778] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8de73a4-674e-40b7-a0ea-a2b6321169a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.211875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a74aa24-5465-4c07-8b3c-5c50b449c29d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.250784] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e42d0a-e424-4257-a87b-bcd5fcd12e91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.263903] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87bfb29e-053f-472f-9dad-f1b5f3be8a32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.282481] env[68437]: DEBUG nova.compute.provider_tree [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.325412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Releasing lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.325885] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Instance network_info: |[{"id": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "address": "fa:16:3e:b3:dc:98", "network": {"id": "0cb69e17-3aed-4255-aff8-30546c26c0b9", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1991544982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38eeb3b94c7343edace03412d5c20cac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa3bcc67-cd", "ovs_interfaceid": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 937.326771] env[68437]: DEBUG nova.virt.vmwareapi.vmops 
[None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:dc:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa3bcc67-cd90-4ea4-8af0-656c009a4631', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.335940] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Creating folder: Project (38eeb3b94c7343edace03412d5c20cac). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.336383] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7171b59-5166-4ae1-93a3-0e33a8e168fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.348090] env[68437]: DEBUG oslo_vmware.api [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944372, 'name': PowerOnVM_Task, 'duration_secs': 0.474215} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.348383] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.348591] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-543ca1c8-5cfc-408a-99ea-1b467eeeebdc tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance '6d877579-3095-4ee9-bb3e-4d5a9122f1ed' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 937.353809] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Created folder: Project (38eeb3b94c7343edace03412d5c20cac) in parent group-v590848. [ 937.354018] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Creating folder: Instances. Parent ref: group-v591021. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.354526] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a4034a3-0a4a-4ebc-9201-7204c1ca8e1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.365598] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Created folder: Instances in parent group-v591021. [ 937.366038] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.366334] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 937.366568] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f084097-3490-4816-90bc-2a1582d2c2ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.387609] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.387609] env[68437]: value = "task-2944376" [ 937.387609] env[68437]: _type = "Task" [ 937.387609] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.397494] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944376, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.433617] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944373, 'name': PowerOffVM_Task, 'duration_secs': 0.239335} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.434159] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.434159] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.434512] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8574eac4-a70f-4c78-b15c-322c7d428fb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.464154] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 937.465677] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178c1353-0d76-498b-b32e-09388f9369dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.472950] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 937.473145] env[68437]: ERROR oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk due to incomplete transfer. [ 937.473390] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-93daf96d-f3bf-42ad-b270-fa9ef65d6258 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.481792] env[68437]: DEBUG oslo_vmware.rw_handles [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529fc795-93c7-9629-8e5f-9f0567b8b5ef/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 937.482523] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Uploaded image b4cab361-c60e-4026-b20f-56f65663461e to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 937.484518] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 937.484775] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-56919b74-054d-456e-9e17-74546cbd7968 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.490937] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 937.490937] env[68437]: value = "task-2944378" [ 937.490937] env[68437]: _type = "Task" [ 937.490937] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.499976] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944378, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.571298] env[68437]: DEBUG nova.compute.manager [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Received event network-changed-aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 937.571663] env[68437]: DEBUG nova.compute.manager [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Refreshing instance network info cache due to event network-changed-aa3bcc67-cd90-4ea4-8af0-656c009a4631. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 937.571930] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] Acquiring lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.572101] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] Acquired lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.572309] env[68437]: DEBUG nova.network.neutron [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Refreshing network info cache for port aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 937.601552] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.601731] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.601910] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Deleting the datastore file [datastore1] 221fcaf9-e17a-4594-90be-9dd49e7df424 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.602202] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d338b8e-181e-4401-96c3-f499f9cc6dd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.610118] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 937.610118] env[68437]: value = "task-2944379" [ 937.610118] env[68437]: _type = "Task" [ 937.610118] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.618564] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944379, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.758296] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.782865] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.783150] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.783316] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.783541] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.783697] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.783850] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.784095] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.784271] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.784454] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.784628] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.784805] env[68437]: DEBUG nova.virt.hardware [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.785731] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2c4812-25b7-44c3-8fe0-7f908ebdbe3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.789071] env[68437]: DEBUG nova.scheduler.client.report [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.798721] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1193e414-4b6b-4bcb-b2d3-d699c8f3ed75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.897257] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944376, 'name': CreateVM_Task, 'duration_secs': 0.421806} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.897447] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.898102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.898276] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.898601] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 937.898849] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94323da4-f910-40fc-994a-40afe8fba421 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.903438] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 937.903438] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520fda4d-0aa8-db82-3234-e6399ff97309" [ 937.903438] env[68437]: _type = "Task" [ 937.903438] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.910916] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520fda4d-0aa8-db82-3234-e6399ff97309, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.001159] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944378, 'name': Destroy_Task, 'duration_secs': 0.356006} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.001463] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Destroyed the VM [ 938.001709] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 938.001970] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-aa41fb6e-a5bc-46da-9d31-90c517a0a8f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.009286] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 938.009286] env[68437]: value = "task-2944380" [ 938.009286] env[68437]: _type = "Task" [ 938.009286] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.017053] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944380, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.121886] env[68437]: DEBUG oslo_vmware.api [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249687} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.121886] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.121886] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.121886] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.121886] env[68437]: INFO nova.compute.manager [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Took 1.22 seconds to destroy the instance on the hypervisor. [ 938.122204] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.122387] env[68437]: DEBUG nova.compute.manager [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.122479] env[68437]: DEBUG nova.network.neutron [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 938.337369] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Successfully updated port: 5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.412186] env[68437]: DEBUG nova.network.neutron [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Updated VIF entry in instance network info cache for port aa3bcc67-cd90-4ea4-8af0-656c009a4631. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 938.412186] env[68437]: DEBUG nova.network.neutron [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Updating instance_info_cache with network_info: [{"id": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "address": "fa:16:3e:b3:dc:98", "network": {"id": "0cb69e17-3aed-4255-aff8-30546c26c0b9", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1991544982-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38eeb3b94c7343edace03412d5c20cac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20f072ba-9cfa-4ae8-a56c-d3082cbe6f5e", "external-id": "nsx-vlan-transportzone-594", "segmentation_id": 594, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa3bcc67-cd", "ovs_interfaceid": "aa3bcc67-cd90-4ea4-8af0-656c009a4631", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.421722] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520fda4d-0aa8-db82-3234-e6399ff97309, 'name': SearchDatastore_Task, 'duration_secs': 0.010317} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.423094] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.423356] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.423650] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.423761] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.423955] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.424530] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24e5cfc0-bccd-4293-b478-fe2abfd39bc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.435837] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.436284] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.437017] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53accd9b-947b-4886-978b-153b807a0cd2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.443783] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 938.443783] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7f402-af29-7bc1-e2c6-ab06796e712b" [ 938.443783] env[68437]: _type = "Task" [ 938.443783] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.452869] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7f402-af29-7bc1-e2c6-ab06796e712b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.522334] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944380, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.626253] env[68437]: DEBUG nova.compute.manager [req-1a33e38e-7946-487f-b46a-fc425ecf1644 req-12f53164-b19b-4fbf-ab0a-5373638234b4 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-deleted-309987c8-0aed-4755-a8e0-b6416865aff4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 938.626531] env[68437]: INFO nova.compute.manager [req-1a33e38e-7946-487f-b46a-fc425ecf1644 req-12f53164-b19b-4fbf-ab0a-5373638234b4 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Neutron deleted interface 309987c8-0aed-4755-a8e0-b6416865aff4; detaching it from the instance and deleting it from the info cache [ 938.627486] env[68437]: DEBUG nova.network.neutron [req-1a33e38e-7946-487f-b46a-fc425ecf1644 req-12f53164-b19b-4fbf-ab0a-5373638234b4 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [{"id": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "address": "fa:16:3e:b3:1c:32", "network": {"id": "213da79d-a765-461c-b6d7-6b9e54eaed18", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1557044246", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.70", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38e66ad-bd", "ovs_interfaceid": "c38e66ad-bd88-409c-812a-3fbbee675bfa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "93185981-83ed-4eb0-b83c-0ca6e684d267", "address": "fa:16:3e:3e:8a:00", "network": {"id": "0db9f38c-b42a-4538-923a-8bc987f7d133", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1057049740", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93185981-83", "ovs_interfaceid": "93185981-83ed-4eb0-b83c-0ca6e684d267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.808405] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.073s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.814202] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.745s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.814442] env[68437]: DEBUG nova.objects.instance [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lazy-loading 'resources' on Instance uuid 39c532b1-b05e-4354-ad8f-9223b06e9488 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.840046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.840046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.840046] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 938.922864] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ce86ac6-f6bc-403f-ac6b-f942530a8615 req-1e9dc84b-9972-4c9a-ade2-a36db467e394 service nova] Releasing lock "refresh_cache-aaa2a858-9cc0-4b5a-8729-80e5440b530d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.955631] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7f402-af29-7bc1-e2c6-ab06796e712b, 'name': SearchDatastore_Task, 'duration_secs': 0.021336} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.956535] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51954089-90d6-4f1e-86da-9e262b77a5de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.962938] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 938.962938] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522d611f-5951-e824-f90d-5e7a66ff7bc1" [ 938.962938] env[68437]: _type = "Task" [ 938.962938] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.972638] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522d611f-5951-e824-f90d-5e7a66ff7bc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.022071] env[68437]: DEBUG oslo_vmware.api [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944380, 'name': RemoveSnapshot_Task, 'duration_secs': 0.657548} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.022071] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 939.022071] env[68437]: INFO nova.compute.manager [None req-605bd8f6-3936-4ef1-9ffa-599ff293b480 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 15.36 seconds to snapshot the instance on the hypervisor. [ 939.130608] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e69f9fb7-4d61-4d09-b622-141a59e10182 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.140861] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7fd088-94bf-4d7c-9e33-5d4301563026 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.178910] env[68437]: DEBUG nova.compute.manager [req-1a33e38e-7946-487f-b46a-fc425ecf1644 req-12f53164-b19b-4fbf-ab0a-5373638234b4 service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Detach interface failed, port_id=309987c8-0aed-4755-a8e0-b6416865aff4, reason: Instance 221fcaf9-e17a-4594-90be-9dd49e7df424 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 939.371840] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 939.406483] env[68437]: DEBUG nova.network.neutron [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.478546] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522d611f-5951-e824-f90d-5e7a66ff7bc1, 'name': SearchDatastore_Task, 'duration_secs': 0.011809} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.478828] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.479110] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] aaa2a858-9cc0-4b5a-8729-80e5440b530d/aaa2a858-9cc0-4b5a-8729-80e5440b530d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.479431] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a962068b-29b2-451c-9082-c5c0df18dc86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.486941] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 939.486941] env[68437]: value = "task-2944381" [ 939.486941] env[68437]: _type = "Task" [ 939.486941] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.499119] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944381, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.545029] env[68437]: DEBUG nova.network.neutron [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Updating instance_info_cache with network_info: [{"id": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "address": "fa:16:3e:97:d8:a5", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0d9027-1e", "ovs_interfaceid": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.685969] env[68437]: DEBUG nova.compute.manager [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Received event network-vif-plugged-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 939.686246] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Acquiring lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.686510] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.686698] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.686873] env[68437]: DEBUG nova.compute.manager [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] No waiting events found dispatching 
network-vif-plugged-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.687960] env[68437]: WARNING nova.compute.manager [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Received unexpected event network-vif-plugged-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 for instance with vm_state building and task_state spawning. [ 939.688243] env[68437]: DEBUG nova.compute.manager [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Received event network-changed-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 939.688485] env[68437]: DEBUG nova.compute.manager [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Refreshing instance network info cache due to event network-changed-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 939.688722] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Acquiring lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.812247] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b797d0d8-4670-4c6c-80b2-40ec56a4ef94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.822427] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebedcdd6-dc74-4acd-9ccb-75db30d06bdf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.864699] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a52fa55-2263-4d5d-98bc-3c87aed0a165 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.874298] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e8e2dd-1aa9-412d-847a-9c193a8a197b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.892741] env[68437]: DEBUG nova.compute.provider_tree [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.908793] env[68437]: INFO nova.compute.manager [-] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Took 1.79 seconds to deallocate network for instance. [ 939.997329] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496704} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.997799] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] aaa2a858-9cc0-4b5a-8729-80e5440b530d/aaa2a858-9cc0-4b5a-8729-80e5440b530d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.998047] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.998321] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3f23180-9c59-40a1-9886-7c17b7ddfdd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.007308] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 940.007308] env[68437]: value = "task-2944382" [ 940.007308] env[68437]: _type = "Task" [ 940.007308] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.017554] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944382, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.049119] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.049538] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Instance network_info: |[{"id": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "address": "fa:16:3e:97:d8:a5", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0d9027-1e", "ovs_interfaceid": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 940.049843] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Acquired lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.050033] env[68437]: DEBUG nova.network.neutron [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Refreshing network info cache for port 5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 940.051327] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:d8:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '48937bd1-23dc-413f-b46b-59bf9e709aa0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.059093] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 
tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.060144] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.060383] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03a44abe-e625-488b-a033-319139514db9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.082352] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.082352] env[68437]: value = "task-2944383" [ 940.082352] env[68437]: _type = "Task" [ 940.082352] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.090925] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944383, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.242494] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.243155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.243155] env[68437]: DEBUG nova.compute.manager [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Going to confirm migration 3 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 940.377167] env[68437]: INFO nova.compute.manager [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Swapping old allocation on dict_keys(['422e986f-b38b-46ad-94b3-91f3ccd10a05']) held by migration bcb29f8b-d96e-4a85-b515-47c3a6a001dc for instance [ 940.396229] env[68437]: DEBUG nova.scheduler.client.report [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.403305] env[68437]: DEBUG nova.scheduler.client.report [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Overwriting current allocation {'allocations': {'422e986f-b38b-46ad-94b3-91f3ccd10a05': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 83}}, 'project_id': 'e4b1c4be5c524504ae9346d2e4ec8008', 'user_id': '17bdd32924094cc9a59a1cb1c27f0c36', 'consumer_generation': 1} on consumer b7706bf2-936f-439c-8e9f-b2241d0c211c {{(pid=68437) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 940.416782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.512934] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.513137] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquired lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.513317] env[68437]: DEBUG nova.network.neutron [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 940.520381] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109213} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.520618] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 940.521437] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5fa9e6-c2c6-42f7-bcc0-8ad93ee1b5d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.544150] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] aaa2a858-9cc0-4b5a-8729-80e5440b530d/aaa2a858-9cc0-4b5a-8729-80e5440b530d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.544685] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9dec400-cfe4-4521-b222-d2f1cf88ee83 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.565068] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 940.565068] env[68437]: value = "task-2944384" [ 940.565068] env[68437]: _type = "Task" [ 940.565068] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.572953] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944384, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.597022] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944383, 'name': CreateVM_Task, 'duration_secs': 0.370667} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.597267] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.598099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.598345] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.598712] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 940.599072] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70c6c9d2-cebd-43a7-b6cf-f1b777e0e0a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.604921] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 940.604921] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524b9b89-9beb-ba86-89bc-e788c4d2119b" [ 940.604921] env[68437]: _type = "Task" [ 940.604921] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.615045] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524b9b89-9beb-ba86-89bc-e788c4d2119b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.656300] env[68437]: DEBUG nova.compute.manager [req-1881db9c-0c88-4615-93d1-ec19fb13e5cb req-0802f996-3bb3-49e9-b6b8-36464659235c service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-deleted-93185981-83ed-4eb0-b83c-0ca6e684d267 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 940.656531] env[68437]: DEBUG nova.compute.manager [req-1881db9c-0c88-4615-93d1-ec19fb13e5cb req-0802f996-3bb3-49e9-b6b8-36464659235c service nova] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Received event network-vif-deleted-c38e66ad-bd88-409c-812a-3fbbee675bfa {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 940.797787] env[68437]: DEBUG nova.network.neutron [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Updated VIF entry in instance network info cache for port 5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 940.798562] env[68437]: DEBUG nova.network.neutron [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Updating instance_info_cache with network_info: [{"id": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "address": "fa:16:3e:97:d8:a5", "network": {"id": "2de0f0ee-17ab-497a-adc4-23b69d5d0a9f", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-277054154-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18b5eecfb2734eaf8288932f146e3d5e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "48937bd1-23dc-413f-b46b-59bf9e709aa0", "external-id": "nsx-vlan-transportzone-160", "segmentation_id": 160, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b0d9027-1e", "ovs_interfaceid": "5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.821715] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.821910] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.822096] env[68437]: DEBUG nova.network.neutron [None 
req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 940.822282] env[68437]: DEBUG nova.objects.instance [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'info_cache' on Instance uuid 6d877579-3095-4ee9-bb3e-4d5a9122f1ed {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.901252] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.903609] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.563s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.903883] env[68437]: DEBUG nova.objects.instance [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lazy-loading 'resources' on Instance uuid de54bc8d-2626-41fc-970a-865a842a932e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.919954] env[68437]: INFO nova.scheduler.client.report [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Deleted allocations for instance 39c532b1-b05e-4354-ad8f-9223b06e9488 [ 941.075929] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944384, 'name': ReconfigVM_Task, 'duration_secs': 0.324792} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.078341] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Reconfigured VM instance instance-00000041 to attach disk [datastore1] aaa2a858-9cc0-4b5a-8729-80e5440b530d/aaa2a858-9cc0-4b5a-8729-80e5440b530d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.078956] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ba3a281-b1e9-4a86-8e82-d02c04593957 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.086074] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 941.086074] env[68437]: value = "task-2944385" [ 941.086074] env[68437]: _type = "Task" [ 941.086074] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.094268] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944385, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.116560] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524b9b89-9beb-ba86-89bc-e788c4d2119b, 'name': SearchDatastore_Task, 'duration_secs': 0.011787} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.116856] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.117105] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.117487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.117650] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.117891] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.118194] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56bbaad3-e00c-4757-9bbf-495fca27c1f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.127510] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.127698] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.128430] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7b64312-58e9-42b9-9195-5e07c27a4992 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.134182] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 941.134182] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e2f6-8b44-47ed-f742-7d752de77593" [ 941.134182] env[68437]: _type = "Task" [ 941.134182] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.142515] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e2f6-8b44-47ed-f742-7d752de77593, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.213255] env[68437]: DEBUG nova.network.neutron [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [{"id": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "address": "fa:16:3e:17:4e:f5", "network": {"id": "62855e12-f0dc-4d6e-abe1-8be5cc213f6f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a61f759776444b78ab0e8a39df9260fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dc7aa55d-223a-4157-9137-88dc492f2db2", "external-id": "nsx-vlan-transportzone-290", "segmentation_id": 290, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c053c01-e5", "ovs_interfaceid": "6c053c01-e575-4bdc-93ce-3604fa26d1ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.301151] env[68437]: DEBUG oslo_concurrency.lockutils [req-7af9d50a-42bf-4811-bf97-04023f8612de req-d8d55762-fbcb-419f-8b20-375cec598068 service nova] Releasing lock "refresh_cache-4f46132c-155d-4def-b017-7fd84e37eed5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.361224] env[68437]: DEBUG nova.compute.manager [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Checking state {{(pid=68437) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 941.362269] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6adf74b-b1c5-423a-8479-390fb5023c79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.428049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-eb5a0685-c514-4c3b-8f69-2277564c1aec tempest-FloatingIPsAssociationTestJSON-418697059 tempest-FloatingIPsAssociationTestJSON-418697059-project-member] Lock "39c532b1-b05e-4354-ad8f-9223b06e9488" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.306s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.598753] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944385, 'name': Rename_Task, 'duration_secs': 0.15982} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.599107] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 941.599386] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1401fa87-aa13-454e-a3cf-d0c6b1e9e19d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.609358] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 941.609358] env[68437]: value = "task-2944386" [ 941.609358] env[68437]: _type = "Task" [ 941.609358] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.622116] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944386, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.650578] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e2f6-8b44-47ed-f742-7d752de77593, 'name': SearchDatastore_Task, 'duration_secs': 0.011338} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.651535] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce80dfbc-96f7-4af5-975e-de7a16a7488c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.657255] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 941.657255] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5282fd79-4b2c-f05c-3828-c4cd3b040886" [ 941.657255] env[68437]: _type = "Task" [ 941.657255] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.668102] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5282fd79-4b2c-f05c-3828-c4cd3b040886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.716192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Releasing lock "refresh_cache-b7706bf2-936f-439c-8e9f-b2241d0c211c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.716657] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.716931] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0f75ce2-fe12-400e-a2fc-bc9163968db4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.725685] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 941.725685] env[68437]: value = "task-2944387" [ 941.725685] env[68437]: _type = "Task" [ 941.725685] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.738352] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944387, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.809310] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fbc438-063d-4532-9cf1-92e242ff883f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.824027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f930401-1c95-4503-9a4a-640a37664390 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.859366] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c567e52-f606-4344-8e39-e6e61bf02ea0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.868553] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1218161e-9567-4ade-be8d-5b2dc10b12b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.886857] env[68437]: INFO nova.compute.manager [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] instance snapshotting [ 941.889195] env[68437]: DEBUG nova.compute.provider_tree [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.891602] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2424304f-1af2-4b20-be17-6903af7deb45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.653408] env[68437]: DEBUG nova.scheduler.client.report [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.667995] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83696ded-1e0e-403f-802e-04402c63f74f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.683290] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5282fd79-4b2c-f05c-3828-c4cd3b040886, 'name': SearchDatastore_Task, 'duration_secs': 0.011023} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.683531] env[68437]: DEBUG oslo_vmware.api [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944386, 'name': PowerOnVM_Task, 'duration_secs': 0.518837} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.683760] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944387, 'name': PowerOffVM_Task, 'duration_secs': 0.196186} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.685471] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.685747] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4f46132c-155d-4def-b017-7fd84e37eed5/4f46132c-155d-4def-b017-7fd84e37eed5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.686032] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 942.686235] env[68437]: INFO nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Took 7.48 seconds to spawn the instance on the hypervisor. 
[ 942.686411] env[68437]: DEBUG nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 942.686669] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.687345] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:39:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d926657d-4f9c-4856-bc8e-d77db590d34a',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1416034897',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.687555] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.687710] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.687892] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.688049] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.688198] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.688474] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.688559] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.688721] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.690268] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.690268] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.698452] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2492aa65-c891-4933-a966-1f0564eb4c24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.702489] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a996237e-b170-432c-b14d-ef939fcb2e66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.705273] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c82a1ac0-25a5-4741-bbde-dafad26a005c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.731514] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 942.731514] env[68437]: value = "task-2944388" [ 942.731514] env[68437]: _type = "Task" [ 942.731514] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.731849] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 942.731849] env[68437]: value = "task-2944389" [ 942.731849] env[68437]: _type = "Task" [ 942.731849] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.745355] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944388, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.747957] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944389, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.941353] env[68437]: DEBUG nova.network.neutron [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [{"id": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "address": "fa:16:3e:87:d9:eb", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbba0a9a2-70", "ovs_interfaceid": "bba0a9a2-7033-420f-baf6-f59f37b8b8b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.159053] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.255s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.161150] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 39.328s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.191877] env[68437]: INFO nova.scheduler.client.report [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Deleted allocations for instance de54bc8d-2626-41fc-970a-865a842a932e [ 943.199444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 943.200719] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-89bd0135-ccb4-4d25-a5f9-caea651cbdd1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.212874] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 943.212874] env[68437]: value = "task-2944390" [ 943.212874] env[68437]: _type = "Task" [ 943.212874] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.226751] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944390, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.253062] env[68437]: INFO nova.compute.manager [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Took 45.04 seconds to build instance. [ 943.260159] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944389, 'name': ReconfigVM_Task, 'duration_secs': 0.203139} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.266914] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944388, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.267873] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ae648d-6c35-4e97-94e0-450be689dbd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.288306] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:39:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d926657d-4f9c-4856-bc8e-d77db590d34a',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1416034897',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 943.288573] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.288737] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 943.288921] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.289085] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 943.289244] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 943.289539] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 943.289669] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 943.289844] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 943.290014] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 943.290194] env[68437]: DEBUG nova.virt.hardware [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 943.291075] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8d35d93-286e-47a0-a18b-1ae4ac4f8e37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.297374] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 943.297374] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52cfea3a-4b27-b054-0a65-4cbb6c16c903" [ 943.297374] env[68437]: _type = "Task" [ 943.297374] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.306318] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cfea3a-4b27-b054-0a65-4cbb6c16c903, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.443997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-6d877579-3095-4ee9-bb3e-4d5a9122f1ed" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.444321] env[68437]: DEBUG nova.objects.instance [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'migration_context' on Instance uuid 6d877579-3095-4ee9-bb3e-4d5a9122f1ed {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 943.699241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34cc401f-c48c-4f7f-85a8-94884cb45707 tempest-ServerGroupTestJSON-118355594 tempest-ServerGroupTestJSON-118355594-project-member] Lock "de54bc8d-2626-41fc-970a-865a842a932e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.327s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.724699] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944390, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.753284] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944388, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571522} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.753742] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4f46132c-155d-4def-b017-7fd84e37eed5/4f46132c-155d-4def-b017-7fd84e37eed5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.754877] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.754877] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07e5d5a2-c28a-4601-b820-613ff93f3477 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.757778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-01c2a724-071c-42bc-b095-86f5fd1a2b72 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.552s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.764826] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 943.764826] env[68437]: value = "task-2944391" [ 943.764826] env[68437]: _type = "Task" [ 943.764826] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.774360] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944391, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.808740] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52cfea3a-4b27-b054-0a65-4cbb6c16c903, 'name': SearchDatastore_Task, 'duration_secs': 0.009342} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.814928] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 943.815565] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fa51737-d43b-4ffe-acff-b6af1af7d704 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.836034] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 943.836034] env[68437]: value = "task-2944392" [ 943.836034] env[68437]: _type = "Task" [ 943.836034] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.845927] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.946978] env[68437]: DEBUG nova.objects.base [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Object Instance<6d877579-3095-4ee9-bb3e-4d5a9122f1ed> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 943.948181] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0e5391-8133-4c5c-9104-098885858e57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.970636] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c40fe6ba-6e05-4179-94e9-8145bdc893b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.977348] env[68437]: DEBUG oslo_vmware.api [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 943.977348] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520c159c-6fcc-0b4d-e83d-2a60683cf35f" [ 943.977348] env[68437]: _type = "Task" [ 943.977348] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.987904] env[68437]: DEBUG oslo_vmware.api [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520c159c-6fcc-0b4d-e83d-2a60683cf35f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.179463] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Applying migration context for instance 6d877579-3095-4ee9-bb3e-4d5a9122f1ed as it has an incoming, in-progress migration b17182b5-43cf-4749-8362-d1a3df183f00. Migration status is confirming {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 944.181962] env[68437]: INFO nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating resource usage from migration b17182b5-43cf-4749-8362-d1a3df183f00 [ 944.206849] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 19dde8dd-eae6-41a0-b147-c505db1cda15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207016] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 2f368262-0825-4ccc-9b1e-523b705bcfce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207150] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cf394b0b-cb14-4ae1-81bb-622c951bfdab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207272] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cf691a81-60e3-40ed-ba80-8f481ff2554b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207386] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance d5db3112-88c7-43af-a434-b91ca69f8559 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207498] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 1186da93-57aa-40f4-8aae-702d039844d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207609] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 013a92cc-0fc2-4e85-aee6-efb62bae4dcb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207717] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207823] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 29e9555b-f928-43e7-a3a3-869ed07d7326 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.207930] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.208047] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance a01364f9-e30d-4140-ae41-1e7c4aaa2251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.208186] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ba0d8067-a617-4910-b2f6-33a7be461f8e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 944.208354] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.208389] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance d84c599e-29b2-45ec-a3f7-54ef85af9a3d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.208545] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance c9d26fd4-f780-4986-8a5f-dea041a70f5d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 944.208614] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 832c99fc-0f09-4ccb-96f9-894ce62eb17e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 944.208729] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance b7706bf2-936f-439c-8e9f-b2241d0c211c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.209338] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.209475] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.209590] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 4254002c-d292-4f10-a3d0-387853dbbcb3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.209716] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 221fcaf9-e17a-4594-90be-9dd49e7df424 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 944.209836] env[68437]: WARNING nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 6d09b9e8-f701-4548-8ec3-c1d9e69223ee is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 944.209978] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Migration b17182b5-43cf-4749-8362-d1a3df183f00 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 944.210075] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 6d877579-3095-4ee9-bb3e-4d5a9122f1ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.210190] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance aaa2a858-9cc0-4b5a-8729-80e5440b530d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.210297] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 4f46132c-155d-4def-b017-7fd84e37eed5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 944.226620] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944390, 'name': CreateSnapshot_Task, 'duration_secs': 0.559789} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.226620] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 944.227114] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc38792-332d-490d-b1d0-851e90c9f346 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.275659] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073659} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.275972] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.276838] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1dae90-1c83-44f1-90ca-56c9110f593d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.299644] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 4f46132c-155d-4def-b017-7fd84e37eed5/4f46132c-155d-4def-b017-7fd84e37eed5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.300710] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-364d0d84-37ab-412b-a484-50eb0f0bd3c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.323218] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 944.323218] env[68437]: value = "task-2944393" [ 944.323218] env[68437]: _type = "Task" [ 944.323218] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.331935] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944393, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.347174] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944392, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.387458] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "interface-aaa2a858-9cc0-4b5a-8729-80e5440b530d-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.388139] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "interface-aaa2a858-9cc0-4b5a-8729-80e5440b530d-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.388809] env[68437]: DEBUG nova.objects.instance [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lazy-loading 'flavor' on Instance uuid aaa2a858-9cc0-4b5a-8729-80e5440b530d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.488694] env[68437]: DEBUG oslo_vmware.api [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520c159c-6fcc-0b4d-e83d-2a60683cf35f, 'name': SearchDatastore_Task, 'duration_secs': 0.018246} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.488981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.713137] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 944.748078] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 944.748716] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d6a6cd9b-6d98-4212-a5fe-27cd205c3fa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.758476] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 944.758476] env[68437]: value = "task-2944394" [ 944.758476] env[68437]: _type = "Task" [ 944.758476] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.767640] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944394, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.834272] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944393, 'name': ReconfigVM_Task, 'duration_secs': 0.294663} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.834569] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 4f46132c-155d-4def-b017-7fd84e37eed5/4f46132c-155d-4def-b017-7fd84e37eed5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.835263] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cdf3056-16fc-439e-b9dd-3e14af4382fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.843942] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 944.843942] env[68437]: value = "task-2944395" [ 944.843942] env[68437]: _type = "Task" [ 944.843942] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.850050] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944392, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.860936] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944395, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.894356] env[68437]: DEBUG nova.objects.instance [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lazy-loading 'pci_requests' on Instance uuid aaa2a858-9cc0-4b5a-8729-80e5440b530d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.218968] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 945.276830] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944394, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.351097] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944392, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.367479] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944395, 'name': Rename_Task, 'duration_secs': 0.37789} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.368240] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.368240] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6393da84-5aa7-451f-adb8-5cb5a601025d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.378281] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 945.378281] env[68437]: value = "task-2944396" [ 945.378281] env[68437]: _type = "Task" [ 945.378281] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.390697] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944396, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.399126] env[68437]: DEBUG nova.objects.base [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 945.399126] env[68437]: DEBUG nova.network.neutron [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 945.525026] env[68437]: DEBUG oslo_concurrency.lockutils [None req-31dfa22d-e648-497f-a51b-ff60d8dc5ad4 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "interface-aaa2a858-9cc0-4b5a-8729-80e5440b530d-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.137s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.725344] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 3f82b137-81d5-4754-b222-3cefce0b2a10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 945.725344] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 945.725344] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 945.780298] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944394, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.853910] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944392, 'name': ReconfigVM_Task, 'duration_secs': 1.591371} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.854404] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 945.855206] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f7cbf8-b75a-4c87-b2f4-e6e6b9f2f50a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.887479] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.894253] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73ae2c76-bd6f-4c90-948e-53b4796370bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.915605] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944396, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.917286] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 945.917286] env[68437]: value = "task-2944397" [ 945.917286] env[68437]: _type = "Task" [ 945.917286] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.929353] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944397, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.283787] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944394, 'name': CloneVM_Task, 'duration_secs': 1.397733} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.285254] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Created linked-clone VM from snapshot [ 946.285254] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd163b53-3296-406f-a256-7e1334783f89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.291520] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e967b04c-670f-4711-96e4-f6d0ef768317 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.297527] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Uploading image 412014ac-45d1-4978-a05a-78399127439f {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 946.305522] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9ba7ca-7e87-4413-b122-8e90feee6a16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.340811] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b093a3-5dc1-4e2f-9587-eb99fc93e68f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.350042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b16c84-e830-438a-b6ac-7c2a4d2161a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.356077] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 
tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 946.356077] env[68437]: value = "vm-591026" [ 946.356077] env[68437]: _type = "VirtualMachine" [ 946.356077] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 946.356401] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-59846811-39a8-4adb-9a73-c3b488f7dcaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.371334] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.371963] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease: (returnval){ [ 946.371963] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f76d1e-2436-324b-51b6-0fd71759e6b3" [ 946.371963] env[68437]: _type = "HttpNfcLease" [ 946.371963] env[68437]: } obtained for exporting VM: (result){ [ 946.371963] env[68437]: value = "vm-591026" [ 946.371963] env[68437]: _type = "VirtualMachine" [ 946.371963] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 946.372532] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the lease: (returnval){ [ 946.372532] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f76d1e-2436-324b-51b6-0fd71759e6b3" [ 946.372532] env[68437]: _type = "HttpNfcLease" [ 946.372532] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 946.383019] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 946.383019] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f76d1e-2436-324b-51b6-0fd71759e6b3" [ 946.383019] env[68437]: _type = "HttpNfcLease" [ 946.383019] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 946.383019] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 946.383019] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f76d1e-2436-324b-51b6-0fd71759e6b3" [ 946.383019] env[68437]: _type = "HttpNfcLease" [ 946.383019] env[68437]: }. 
{{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 946.384291] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d225f0d-ce49-4cc3-9f1b-0c97abbbd910 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.396224] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 946.396224] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 946.400796] env[68437]: DEBUG oslo_vmware.api [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944396, 'name': PowerOnVM_Task, 'duration_secs': 0.666174} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.400796] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.400796] env[68437]: INFO nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Took 8.64 seconds to spawn the instance on the hypervisor. [ 946.400796] env[68437]: DEBUG nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.400796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a117a02c-2cab-46e7-9063-46e8e3b2536b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.479052] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944397, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.520322] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-253752de-ca30-4d9d-97f7-92fa682cfe63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.874563] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.979525] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944397, 'name': ReconfigVM_Task, 'duration_secs': 0.660839} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.983510] env[68437]: INFO nova.compute.manager [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Took 47.65 seconds to build instance. [ 946.984311] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c/b7706bf2-936f-439c-8e9f-b2241d0c211c.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.986243] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27ccdbc-3ceb-46e9-b0ad-20df887aa834 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.009103] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caf31a6-1b9e-4c47-b6ea-10ef0ec659d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.034019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0c0dfc-d6d2-4064-8272-bd64becef5fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.053918] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb6a7e0-0cf2-4a21-a3d5-fa27717e7a5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.064095] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 
b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 947.064286] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bda19ac-ec75-41d0-b33d-bc70ee5dff7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.076579] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 947.076579] env[68437]: value = "task-2944399" [ 947.076579] env[68437]: _type = "Task" [ 947.076579] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.086786] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.382664] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 947.382943] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.222s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.383176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.771s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.383432] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.385786] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.266s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.385988] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.388045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.387s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.393034] env[68437]: INFO nova.compute.claims [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.395868] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 947.396070] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 947.433203] env[68437]: INFO nova.scheduler.client.report [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted allocations for instance ba0d8067-a617-4910-b2f6-33a7be461f8e [ 947.451912] env[68437]: INFO nova.scheduler.client.report [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleted allocations for instance 832c99fc-0f09-4ccb-96f9-894ce62eb17e [ 947.486507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7dcc85c5-74f2-4165-8a42-794ac0a4f344 tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.167s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.594248] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944399, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.626454] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.627390] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.747550] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.747799] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.747999] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.748196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.748359] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.751742] env[68437]: INFO nova.compute.manager [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 
tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Terminating instance [ 947.919272] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] There are 41 instances to clean {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 947.919808] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: de54bc8d-2626-41fc-970a-865a842a932e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 947.932389] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.932884] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.949499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0e3cac39-3c48-4b7d-bc41-79429e37562b tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "ba0d8067-a617-4910-b2f6-33a7be461f8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.636s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.965614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ff3b4fe-e8c8-4b36-9393-4e8e6773bd47 tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "832c99fc-0f09-4ccb-96f9-894ce62eb17e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.375s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.090662] env[68437]: DEBUG oslo_vmware.api [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944399, 'name': PowerOnVM_Task, 'duration_secs': 0.599656} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.090928] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 948.129918] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 948.261226] env[68437]: DEBUG nova.compute.manager [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.261226] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.261497] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ed076c-929f-4ce3-8797-577e4fd6504a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.273972] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.274337] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9802dd43-37ee-47f1-8320-1717f8be10db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.280467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "4f46132c-155d-4def-b017-7fd84e37eed5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.280674] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.280895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.281058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.281225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.284267] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 948.284267] env[68437]: value = "task-2944401" [ 948.284267] env[68437]: _type = "Task" [ 948.284267] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.285075] env[68437]: INFO nova.compute.manager [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Terminating instance [ 948.301776] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.410846] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7c62b7-8e98-44e6-88a7-5f62bb5afc26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.419999] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4de4a4b-5c84-4a14-99f2-23383f9964e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.453686] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 892bf198-7d05-4995-8137-c57095c5c839] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 948.455291] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 948.459987] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23d09fa-88ab-461c-bcfd-4e854430c9a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.469939] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68221da8-220b-4a68-8433-e60b36697e80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.486557] env[68437]: DEBUG nova.compute.provider_tree [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.663442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.801069] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944401, 'name': PowerOffVM_Task, 'duration_secs': 0.412563} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.801538] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.801708] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.802089] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3adbb01-3ba6-4085-880c-c819f9eaf373 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.804749] env[68437]: DEBUG nova.compute.manager [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.805010] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.806477] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db37dc25-665a-4eb9-a622-2d7f555c5e75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.816730] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.817069] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0efc6950-dc74-4bfd-ad92-0b197f6bf62a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.824630] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 948.824630] env[68437]: value = "task-2944403" [ 948.824630] env[68437]: _type = "Task" [ 948.824630] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.834126] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944403, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.893334] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.893334] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.893334] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Deleting the datastore file [datastore1] aaa2a858-9cc0-4b5a-8729-80e5440b530d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.893334] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f065567-ad08-4b31-b3af-be88ff1021a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.903707] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for the task: (returnval){ [ 948.903707] env[68437]: value = "task-2944404" [ 948.903707] env[68437]: _type = "Task" [ 948.903707] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.912822] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944404, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.962256] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2a0772bf-ce23-4579-9bea-7e706a80cd4d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 948.982192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.989419] env[68437]: DEBUG nova.scheduler.client.report [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.103581] env[68437]: INFO nova.compute.manager [None req-d15ffb2a-da67-4a66-8660-8eaa6e32a572 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance to original state: 'active' [ 949.165342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.165757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.166114] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.166409] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.166701] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.169306] env[68437]: INFO nova.compute.manager [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Terminating instance [ 949.337096] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944403, 'name': PowerOffVM_Task, 'duration_secs': 0.373925} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.337407] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.337580] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.337844] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-baf37e95-1ba8-4c68-9ced-1f3215a3f430 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.417261] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.417614] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.417705] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleting the datastore file [datastore1] 4f46132c-155d-4def-b017-7fd84e37eed5 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.418068] env[68437]: DEBUG oslo_vmware.api [None req-b6a39152-939b-467d-9939-3762ced57814 
tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Task: {'id': task-2944404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224179} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.418561] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-611a2848-bb76-46c0-a031-c156b338fd08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.420318] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.420553] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.420703] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.420940] env[68437]: INFO nova.compute.manager [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 949.421194] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.421428] env[68437]: DEBUG nova.compute.manager [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.421603] env[68437]: DEBUG nova.network.neutron [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 949.430011] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for the task: (returnval){ [ 949.430011] env[68437]: value = "task-2944406" [ 949.430011] env[68437]: _type = "Task" [ 949.430011] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.444276] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944406, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.469599] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: fc62ff9d-1bd8-4b32-9e71-41410276802d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 949.495039] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.107s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.495375] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.498649] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.487s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.500069] env[68437]: INFO nova.compute.claims [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.674310] env[68437]: DEBUG nova.compute.manager [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.674556] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.676185] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7473ee5e-61a1-4617-a4a8-8bc5108bc8b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.686848] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.687129] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3802ea40-800b-46d3-9d29-17da75e9c666 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.694789] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 949.694789] env[68437]: value = "task-2944407" [ 949.694789] env[68437]: _type = "Task" [ 949.694789] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.704593] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944407, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.763435] env[68437]: DEBUG nova.compute.manager [req-f392f4a4-7889-4ea0-b74c-3d58395dfbf5 req-a056bfe9-baed-460c-ac02-d86b93800d09 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Received event network-vif-deleted-aa3bcc67-cd90-4ea4-8af0-656c009a4631 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 949.763435] env[68437]: INFO nova.compute.manager [req-f392f4a4-7889-4ea0-b74c-3d58395dfbf5 req-a056bfe9-baed-460c-ac02-d86b93800d09 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Neutron deleted interface aa3bcc67-cd90-4ea4-8af0-656c009a4631; detaching it from the instance and deleting it from the info cache [ 949.763710] env[68437]: DEBUG nova.network.neutron [req-f392f4a4-7889-4ea0-b74c-3d58395dfbf5 req-a056bfe9-baed-460c-ac02-d86b93800d09 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.941100] env[68437]: DEBUG oslo_vmware.api [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Task: {'id': task-2944406, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.300151} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.942098] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.942331] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.942522] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.942704] env[68437]: INFO nova.compute.manager [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 949.942962] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.943218] env[68437]: DEBUG nova.compute.manager [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.943549] env[68437]: DEBUG nova.network.neutron [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 949.974143] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ed1a81fd-dd4b-4126-96de-3c3f67cdca31] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 950.004448] env[68437]: DEBUG nova.compute.utils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.008680] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.008926] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 950.082409] env[68437]: DEBUG nova.policy [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1074dd1b444e45beadcccfe6671c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1c3ca0e78f472e8c127fa68ed610f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.107587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.107587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.107587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.107587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.107587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.108707] env[68437]: INFO nova.compute.manager [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 
tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Terminating instance [ 950.193533] env[68437]: DEBUG nova.network.neutron [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.206745] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944407, 'name': PowerOffVM_Task, 'duration_secs': 0.36668} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.207633] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.207870] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.208150] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64de5354-2b37-4128-9528-fd03a8547c5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.266304] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81616958-de6a-4bed-8979-5cd2a019d3df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.277487] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca786ba0-1288-425d-9a4a-fca8f366d9e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.290970] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 950.291330] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 950.291558] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore2] 013a92cc-0fc2-4e85-aee6-efb62bae4dcb {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 950.292309] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-675c25da-a891-4246-9295-8c107388188d {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.300433] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 950.300433] env[68437]: value = "task-2944409" [ 950.300433] env[68437]: _type = "Task" [ 950.300433] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.322488] env[68437]: DEBUG nova.compute.manager [req-f392f4a4-7889-4ea0-b74c-3d58395dfbf5 req-a056bfe9-baed-460c-ac02-d86b93800d09 service nova] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Detach interface failed, port_id=aa3bcc67-cd90-4ea4-8af0-656c009a4631, reason: Instance aaa2a858-9cc0-4b5a-8729-80e5440b530d could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 950.330020] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.478743] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 26985e45-21ff-40bb-ac2b-c6f3700ccc97] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 950.506241] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Successfully created port: 12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.509232] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.615020] env[68437]: DEBUG nova.compute.manager [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.615277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.616345] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd12abab-5a39-44da-8c58-f0e0cc07767c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.625224] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.627843] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9891dd1-e183-4571-9782-575475ee996b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.636113] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 950.636113] env[68437]: value = "task-2944410" [ 950.636113] env[68437]: _type = "Task" [ 950.636113] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.646380] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944410, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.701224] env[68437]: INFO nova.compute.manager [-] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Took 1.28 seconds to deallocate network for instance. [ 950.812592] env[68437]: DEBUG oslo_vmware.api [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267512} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.815739] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 950.815944] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 950.816140] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 950.816320] env[68437]: INFO nova.compute.manager [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 950.816678] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.817450] env[68437]: DEBUG nova.compute.manager [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 950.817548] env[68437]: DEBUG nova.network.neutron [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 950.846607] env[68437]: DEBUG nova.network.neutron [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.972745] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a1b65d-34b9-4999-97e5-4ec97c3ba4ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.981648] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62af484-a065-46ca-a938-13d95f1be659 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.985421] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: f212ea0b-5bf8-4a7f-820c-fd7e9d70aa81] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 951.022828] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275cf5ab-5183-4741-b25f-273204308dd4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.032387] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cd8e20-f589-4b57-b9d0-fda6820cf3ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.049792] env[68437]: DEBUG nova.compute.provider_tree [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.146910] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944410, 'name': PowerOffVM_Task, 'duration_secs': 0.237548} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.147267] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.147459] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.147718] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bccaac9-bfba-4c40-8236-4a727b6b0653 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.207935] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.225208] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.225458] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.225674] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleting the datastore file [datastore1] b7706bf2-936f-439c-8e9f-b2241d0c211c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.225987] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba5f36e8-2801-4b2e-b9d8-0771dcafd4e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.233980] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 951.233980] env[68437]: value = "task-2944412" [ 951.233980] env[68437]: _type = "Task" [ 951.233980] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.243040] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.349187] env[68437]: INFO nova.compute.manager [-] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Took 1.41 seconds to deallocate network for instance. [ 951.488555] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: acbf4c5c-341c-4ebd-ad29-90ebf531aa86] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 951.524474] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.551976] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a4402fd7bda78696a5a0c48d79e62b6e',container_format='bare',created_at=2025-03-11T18:41:43Z,direct_url=,disk_format='vmdk',id=10f6eb30-f78d-487c-b50f-3e423a5ce5e1,min_disk=1,min_ram=0,name='tempest-test-snap-389300343',owner='3f1c3ca0e78f472e8c127fa68ed610f5',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-11T18:42:00Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.552260] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.552419] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.552602] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.552763] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.552929] env[68437]: DEBUG nova.virt.hardware [None 
req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.553156] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.553317] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.553488] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.553652] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.553834] env[68437]: DEBUG nova.virt.hardware [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.554727] env[68437]: DEBUG nova.scheduler.client.report [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.558354] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed847410-c9b8-4365-af70-6fee728d1945 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.561602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.561817] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf 
tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.562052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.562288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.562396] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.569496] env[68437]: INFO nova.compute.manager [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Terminating instance [ 951.573582] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85448de-bb9e-46a4-8ae3-2df74889f099 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.591453] env[68437]: DEBUG nova.network.neutron [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.745313] env[68437]: DEBUG oslo_vmware.api [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196708} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.745604] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.745790] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.745974] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.746194] env[68437]: INFO nova.compute.manager [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 951.746511] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.746735] env[68437]: DEBUG nova.compute.manager [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.746838] env[68437]: DEBUG nova.network.neutron [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 951.797140] env[68437]: DEBUG nova.compute.manager [req-bea0e07c-ad7f-49f4-9209-0ba49e935b22 req-0c525035-76e1-4c01-9424-19eedb4d19ae service nova] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Received event network-vif-deleted-5b0d9027-1e7c-4622-a0c9-85f1fcf3b7a5 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 951.797452] env[68437]: DEBUG nova.compute.manager [req-bea0e07c-ad7f-49f4-9209-0ba49e935b22 req-0c525035-76e1-4c01-9424-19eedb4d19ae service nova] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Received event network-vif-deleted-c0002143-a475-44e6-afd5-c6389c790504 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 951.858421] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.992511] env[68437]: DEBUG nova.compute.manager [None 
req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: c5af19d6-5534-45e6-8c9c-dacf30d4fb1a] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 952.062760] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.067256] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 952.072912] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.700s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.077926] env[68437]: INFO nova.compute.claims [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.085017] env[68437]: DEBUG nova.compute.manager [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 952.085017] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.085017] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb21ac2-882a-4472-b970-038f4e72ad6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.093385] env[68437]: INFO nova.compute.manager [-] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Took 1.28 seconds to deallocate network for instance. 
[ 952.093739] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.095497] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4556992f-a126-428b-b9d1-f06da356935e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.108522] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 952.108522] env[68437]: value = "task-2944413" [ 952.108522] env[68437]: _type = "Task" [ 952.108522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.120164] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944413, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.184700] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Successfully updated port: 12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.495959] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 995a3eae-c025-4efa-b509-0bf678bb0388] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 952.583918] env[68437]: DEBUG nova.compute.utils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.588563] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.588563] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 952.604018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.619943] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944413, 'name': PowerOffVM_Task, 'duration_secs': 0.266679} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.620269] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.620471] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.620742] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64b7e95b-791b-4af3-b4bb-f1fc82638fca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.631718] env[68437]: DEBUG nova.network.neutron [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.642320] env[68437]: DEBUG nova.policy [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '422581409f184842ba7b5ede7910dfa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae644c44c6d541f5810ee50d16b3e141', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.692304] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.692304] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.692304] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 952.697822] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.698113] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.698351] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleting the datastore file [datastore2] cf394b0b-cb14-4ae1-81bb-622c951bfdab {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.699255] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30d4313f-6365-44e4-b237-fe2d5d949f03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.707989] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for the task: (returnval){ [ 952.707989] env[68437]: value = "task-2944415" [ 952.707989] env[68437]: _type = "Task" [ 952.707989] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.719414] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944415, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.926521] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Successfully created port: ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.999809] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 860107df-4e9b-44b1-9e85-b0ee3a827268] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 953.093285] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.135481] env[68437]: INFO nova.compute.manager [-] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Took 1.39 seconds to deallocate network for instance. [ 953.223620] env[68437]: DEBUG oslo_vmware.api [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Task: {'id': task-2944415, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131299} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.223620] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.223620] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.223620] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.223620] env[68437]: INFO nova.compute.manager [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Took 1.14 seconds to destroy the instance on the hypervisor. [ 953.224062] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.224413] env[68437]: DEBUG nova.compute.manager [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 953.224413] env[68437]: DEBUG nova.network.neutron [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 953.229407] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 953.391229] env[68437]: DEBUG nova.network.neutron [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Updating instance_info_cache with network_info: [{"id": "12fcc1b5-700b-4949-9981-774da9ed33b0", "address": "fa:16:3e:88:3a:b1", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcc1b5-70", "ovs_interfaceid": "12fcc1b5-700b-4949-9981-774da9ed33b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.505375] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6b10ff9f-3248-46fe-9cd4-19e0ebbcee77] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 953.574024] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f607ae-dde2-4e6f-ae57-f1fe014b3596 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.581548] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7422150b-3d6f-4529-921a-bc3cc9edb6bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.617449] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d53f8a5-695f-4f2a-9eb8-caff25d50d47 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.626564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7db895-e191-473d-996e-280d500763cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.645954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.646079] env[68437]: DEBUG nova.compute.provider_tree [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.896267] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.896619] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Instance network_info: |[{"id": "12fcc1b5-700b-4949-9981-774da9ed33b0", "address": "fa:16:3e:88:3a:b1", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcc1b5-70", "ovs_interfaceid": "12fcc1b5-700b-4949-9981-774da9ed33b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.897086] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:3a:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12fcc1b5-700b-4949-9981-774da9ed33b0', 
'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.906637] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.907221] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.907468] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd3596c3-2da9-4429-bb5f-d9e262c4c22d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.930529] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.930529] env[68437]: value = "task-2944416" [ 953.930529] env[68437]: _type = "Task" [ 953.930529] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.939637] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944416, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.009391] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: efed858a-44b9-45b7-8778-22183549088c] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 954.115686] env[68437]: DEBUG nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Received event network-vif-plugged-12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 954.116887] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Acquiring lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.116887] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.116887] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.116887] env[68437]: DEBUG nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: 
c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] No waiting events found dispatching network-vif-plugged-12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 954.116887] env[68437]: WARNING nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Received unexpected event network-vif-plugged-12fcc1b5-700b-4949-9981-774da9ed33b0 for instance with vm_state building and task_state spawning. [ 954.117871] env[68437]: DEBUG nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Received event network-vif-deleted-6c053c01-e575-4bdc-93ce-3604fa26d1ee {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 954.118128] env[68437]: DEBUG nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Received event network-changed-12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 954.118316] env[68437]: DEBUG nova.compute.manager [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Refreshing instance network info cache due to event network-changed-12fcc1b5-700b-4949-9981-774da9ed33b0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 954.118513] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Acquiring lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.118653] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Acquired lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.118810] env[68437]: DEBUG nova.network.neutron [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Refreshing network info cache for port 12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 954.122105] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.152197] env[68437]: DEBUG nova.scheduler.client.report [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.166511] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.166790] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.166960] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.168227] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.168227] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.168227] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.168473] env[68437]: DEBUG 
nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.169039] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.169039] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.170046] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.170046] env[68437]: DEBUG nova.virt.hardware [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.170970] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05c0aca-073a-4c73-a134-6d9c755d3da2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.186027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00c3659-4b7f-49ee-8d1e-19d7a11de25d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.349949] env[68437]: DEBUG nova.compute.manager [req-a1b31c83-687a-4275-8b25-8667c4044646 req-bf50b5b3-9a94-47b5-b81c-347020bd366f service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Received event network-vif-deleted-77d0b04c-ecff-4b2e-a001-7248da043b47 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 954.350810] env[68437]: INFO nova.compute.manager [req-a1b31c83-687a-4275-8b25-8667c4044646 req-bf50b5b3-9a94-47b5-b81c-347020bd366f service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Neutron deleted interface 77d0b04c-ecff-4b2e-a001-7248da043b47; detaching it from the instance and deleting it from the info cache [ 954.351020] env[68437]: DEBUG nova.network.neutron [req-a1b31c83-687a-4275-8b25-8667c4044646 req-bf50b5b3-9a94-47b5-b81c-347020bd366f service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.448871] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944416, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.461820] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Successfully updated port: ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.515172] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 5202b708-179c-48d2-9c4e-2bb5ab1a6ebb] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 954.658161] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.658971] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.662590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.753s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.662590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.666759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.907s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.667257] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.668902] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.252s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.669140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.671043] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.182s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.699104] env[68437]: INFO nova.scheduler.client.report [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Deleted allocations for instance 6d09b9e8-f701-4548-8ec3-c1d9e69223ee [ 954.701174] env[68437]: INFO nova.scheduler.client.report [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Deleted allocations for instance 221fcaf9-e17a-4594-90be-9dd49e7df424 [ 954.721031] env[68437]: INFO nova.scheduler.client.report [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Deleted allocations for instance c9d26fd4-f780-4986-8a5f-dea041a70f5d [ 954.749092] env[68437]: DEBUG nova.network.neutron [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.855130] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e27b194f-4ef2-40cd-8479-544f68a3737f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.867635] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c93753f-2d1c-4d8a-8e30-bd9567f417d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.884963] env[68437]: DEBUG nova.network.neutron [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Updated VIF entry in instance network info cache for port 12fcc1b5-700b-4949-9981-774da9ed33b0. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 954.885586] env[68437]: DEBUG nova.network.neutron [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Updating instance_info_cache with network_info: [{"id": "12fcc1b5-700b-4949-9981-774da9ed33b0", "address": "fa:16:3e:88:3a:b1", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12fcc1b5-70", "ovs_interfaceid": "12fcc1b5-700b-4949-9981-774da9ed33b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.922040] env[68437]: DEBUG nova.compute.manager [req-a1b31c83-687a-4275-8b25-8667c4044646 req-bf50b5b3-9a94-47b5-b81c-347020bd366f service nova] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Detach interface failed, port_id=77d0b04c-ecff-4b2e-a001-7248da043b47, reason: Instance cf394b0b-cb14-4ae1-81bb-622c951bfdab could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 954.942251] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944416, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.968118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.968368] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquired lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.968616] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 955.017984] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 39c532b1-b05e-4354-ad8f-9223b06e9488] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 955.063156] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 955.064178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f24e95-66c6-4cb3-95c3-43564c458187 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.071733] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 955.071905] env[68437]: ERROR oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk due to incomplete transfer. 
[ 955.072185] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-50eb4bd7-3781-49fe-995c-1858478d360a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.081069] env[68437]: DEBUG oslo_vmware.rw_handles [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c9a92c-144c-4ad2-7235-2b9ce33cf827/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 955.081359] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Uploaded image 412014ac-45d1-4978-a05a-78399127439f to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 955.083551] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 955.083799] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0852dd80-78b2-40b7-b35e-8018ea2f569f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.091160] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 955.091160] env[68437]: value = "task-2944417" [ 955.091160] env[68437]: _type = "Task" [ 955.091160] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.101385] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944417, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.174566] env[68437]: DEBUG nova.compute.utils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.179558] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.179734] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 955.213052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0cb82fca-75e8-4be8-84cb-3d130b8f4561 tempest-ServersAaction247Test-1455053506 tempest-ServersAaction247Test-1455053506-project-member] Lock "6d09b9e8-f701-4548-8ec3-c1d9e69223ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.724s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.215523] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4960d71e-7e96-48e3-8ac1-151b75fecbf9 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "221fcaf9-e17a-4594-90be-9dd49e7df424" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.819s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.232052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9440e172-1695-466d-bdb8-a16af76c82dd tempest-ServerRescueTestJSONUnderV235-1362731111 tempest-ServerRescueTestJSONUnderV235-1362731111-project-member] Lock "c9d26fd4-f780-4986-8a5f-dea041a70f5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.803s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.251389] env[68437]: DEBUG nova.policy [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.253885] env[68437]: INFO nova.compute.manager [-] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Took 2.03 seconds to deallocate network for instance. [ 955.389710] env[68437]: DEBUG oslo_concurrency.lockutils [req-fd3fbba3-1ca3-4cf8-8077-71c9ca57541f req-f9c67d9f-d961-49e1-8742-cb491629fad7 service nova] Releasing lock "refresh_cache-c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.450411] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944416, 'name': CreateVM_Task, 'duration_secs': 1.084127} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.450411] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.451249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.451424] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.451882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 955.452105] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d4d469-1399-4977-b722-0e6fb961db40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.458600] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 955.458600] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd8858-5462-604a-2f7a-d1f0c0113f52" [ 955.458600] env[68437]: _type = "Task" [ 955.458600] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.476907] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd8858-5462-604a-2f7a-d1f0c0113f52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.525172] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: e3855111-7678-42c5-a37e-25e8587416aa] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 955.542772] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 955.603067] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944417, 'name': Destroy_Task, 'duration_secs': 0.415692} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.606384] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Destroyed the VM [ 955.606626] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 955.607083] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-749b4148-b9ea-4776-88a2-52d24114866a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.615807] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 955.615807] env[68437]: value = "task-2944418" [ 955.615807] env[68437]: _type = "Task" [ 955.615807] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.630343] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944418, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.640842] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1e231e-dcaa-46cf-8398-36b25e2d5e0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.653678] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc4ee1e-2f75-4d90-8f80-8f88b8530be4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.690188] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.698020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519e7945-f6b2-4a29-a0d7-a3958d98f215 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.706947] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad440247-5fba-4f39-a2ca-2c3b5890693f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.723062] env[68437]: DEBUG nova.compute.provider_tree [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.760229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.863205] env[68437]: DEBUG nova.network.neutron [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Updating instance_info_cache with network_info: [{"id": "ea0db909-110c-4015-9c31-5b6aae262d4b", "address": "fa:16:3e:18:10:86", "network": {"id": "28975516-65a2-46c9-95ef-9751ae2f6eff", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-798581180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae644c44c6d541f5810ee50d16b3e141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0db909-11", "ovs_interfaceid": "ea0db909-110c-4015-9c31-5b6aae262d4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.975672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.976154] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 
tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Processing image 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.976677] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.977274] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.978081] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.978647] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0366298-4959-4b7f-b1bd-376679b77b7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.992327] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.992327] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.992327] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ba4f758-0eca-42e2-979f-4166212a2a95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.001795] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 956.001795] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524ca34c-bd26-b2c1-96a6-52eafa9d1298" [ 956.001795] env[68437]: _type = "Task" [ 956.001795] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.013982] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524ca34c-bd26-b2c1-96a6-52eafa9d1298, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.032237] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: f1230046-d368-40ee-b1fa-99df4ab15a10] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 956.084405] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Successfully created port: bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.132025] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944418, 'name': RemoveSnapshot_Task} progress is 30%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.151316] env[68437]: DEBUG nova.compute.manager [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Received event network-vif-plugged-ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.151534] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Acquiring lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.151747] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.151921] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.152347] env[68437]: DEBUG nova.compute.manager [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] No waiting events found dispatching network-vif-plugged-ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.152593] env[68437]: WARNING nova.compute.manager [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Received unexpected event network-vif-plugged-ea0db909-110c-4015-9c31-5b6aae262d4b for instance with vm_state building and task_state spawning. 
[ 956.152803] env[68437]: DEBUG nova.compute.manager [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Received event network-changed-ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.153018] env[68437]: DEBUG nova.compute.manager [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Refreshing instance network info cache due to event network-changed-ea0db909-110c-4015-9c31-5b6aae262d4b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 956.153270] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Acquiring lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.227393] env[68437]: DEBUG nova.scheduler.client.report [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.366508] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Releasing lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.367123] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Instance network_info: |[{"id": "ea0db909-110c-4015-9c31-5b6aae262d4b", "address": "fa:16:3e:18:10:86", "network": {"id": "28975516-65a2-46c9-95ef-9751ae2f6eff", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-798581180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae644c44c6d541f5810ee50d16b3e141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0db909-11", "ovs_interfaceid": "ea0db909-110c-4015-9c31-5b6aae262d4b", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.369151] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Acquired lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.369151] env[68437]: DEBUG nova.network.neutron [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Refreshing network info cache for port ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 956.369403] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:10:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea0db909-110c-4015-9c31-5b6aae262d4b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.384811] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Creating folder: Project (ae644c44c6d541f5810ee50d16b3e141). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.386040] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c731aa75-bd2a-417c-8c1a-d721c90cfc5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.398928] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Created folder: Project (ae644c44c6d541f5810ee50d16b3e141) in parent group-v590848. [ 956.399191] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Creating folder: Instances. Parent ref: group-v591028. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.399454] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f77c75c9-990c-4b12-8f02-3b4c979707dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.412193] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Created folder: Instances in parent group-v591028. 
[ 956.412483] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.412692] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.412909] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62f217a0-4437-4d05-9ec7-2538b9b29ea8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.441963] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.441963] env[68437]: value = "task-2944421" [ 956.441963] env[68437]: _type = "Task" [ 956.441963] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.455877] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944421, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.512971] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 956.513856] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Fetch image to [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b/OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 956.514617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Downloading stream optimized image 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 to [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b/OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b.vmdk on the data store datastore1 as vApp {{(pid=68437) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 956.514859] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Downloading image file data 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 to the ESX as VM named 'OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b' {{(pid=68437) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 956.537152] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 56cfa7f3-12ad-42d0-a27f-ab8136a903ee] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 956.615172] env[68437]: 
DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 956.615172] env[68437]: value = "resgroup-9" [ 956.615172] env[68437]: _type = "ResourcePool" [ 956.615172] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 956.615172] env[68437]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f32a4cd8-905a-4efd-b69e-184af504aa39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.646788] env[68437]: DEBUG oslo_vmware.api [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944418, 'name': RemoveSnapshot_Task, 'duration_secs': 0.582998} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.648393] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 956.648768] env[68437]: INFO nova.compute.manager [None req-eb743758-eb26-4a61-938b-f655633792b4 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Took 14.76 seconds to snapshot the instance on the hypervisor. [ 956.651595] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease: (returnval){ [ 956.651595] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 956.651595] env[68437]: _type = "HttpNfcLease" [ 956.651595] env[68437]: } obtained for vApp import into resource pool (val){ [ 956.651595] env[68437]: value = "resgroup-9" [ 956.651595] env[68437]: _type = "ResourcePool" [ 956.651595] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 956.652219] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the lease: (returnval){ [ 956.652219] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 956.652219] env[68437]: _type = "HttpNfcLease" [ 956.652219] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 956.660873] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 956.660873] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 956.660873] env[68437]: _type = "HttpNfcLease" [ 956.660873] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 956.706337] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.755251] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.755881] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.755881] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.755881] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.756889] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.756889] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.756889] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 956.756889] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.756889] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.757031] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.757258] env[68437]: DEBUG nova.virt.hardware [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.759557] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f38745-a43e-45ea-bc54-475ebd6a0d21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.769739] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa85871-9b23-4683-afa1-6d41fc7092b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.953057] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944421, 'name': CreateVM_Task, 'duration_secs': 0.423031} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.953272] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.954048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.954152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.954492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 956.954689] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a449c46-be98-4d61-bdbf-ee62f0030b87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.961302] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 956.961302] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c18678-1ae6-1d1a-ab71-a3e7e511e86d" [ 956.961302] env[68437]: _type = "Task" [ 956.961302] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.970345] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c18678-1ae6-1d1a-ab71-a3e7e511e86d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.044892] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 9a7c248f-5262-4f03-aace-f22c4976bb0f] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 957.165913] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 957.165913] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 957.165913] env[68437]: _type = "HttpNfcLease" [ 957.165913] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 957.195382] env[68437]: DEBUG nova.network.neutron [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Updated VIF entry in instance network info cache for port ea0db909-110c-4015-9c31-5b6aae262d4b. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 957.195830] env[68437]: DEBUG nova.network.neutron [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Updating instance_info_cache with network_info: [{"id": "ea0db909-110c-4015-9c31-5b6aae262d4b", "address": "fa:16:3e:18:10:86", "network": {"id": "28975516-65a2-46c9-95ef-9751ae2f6eff", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-798581180-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ae644c44c6d541f5810ee50d16b3e141", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea0db909-11", "ovs_interfaceid": "ea0db909-110c-4015-9c31-5b6aae262d4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.241161] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.570s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.244244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.581s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.245855] env[68437]: INFO nova.compute.claims [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.475949] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c18678-1ae6-1d1a-ab71-a3e7e511e86d, 'name': SearchDatastore_Task, 'duration_secs': 0.011673} completed 
successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.476494] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.476856] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.477307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.477495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.477799] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.478268] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e68f9bd8-a95a-4e08-bbc5-52b70bab9173 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.490296] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.490499] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.491288] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d1b6e0c-7cef-4b8b-94e9-0d9b42bae60b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.499980] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 957.499980] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1ae-1726-9801-e2bd-14f85a30f7c3" [ 957.499980] env[68437]: _type = "Task" [ 957.499980] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.508431] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1ae-1726-9801-e2bd-14f85a30f7c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.549111] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: b92efa60-ef18-4578-b00d-6a2438e7eacf] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 957.666791] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 957.666791] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 957.666791] env[68437]: _type = "HttpNfcLease" [ 957.666791] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 957.667521] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 957.667521] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e6bff5-d3b8-1dbd-2882-0967bd1879f8" [ 957.667521] env[68437]: _type = "HttpNfcLease" [ 957.667521] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 957.671376] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b76a58-501b-4583-987b-a600789378c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.682585] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk from lease info. 
{{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 957.682805] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk. {{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 957.750694] env[68437]: DEBUG oslo_concurrency.lockutils [req-6cd46352-11b1-418a-a022-7e5aa770fabd req-882940b7-bf93-4e7f-94e4-9f3049e6fbf1 service nova] Releasing lock "refresh_cache-2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.764971] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63617004-b607-474d-a453-e8dc71388a04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.839937] env[68437]: INFO nova.scheduler.client.report [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocation for migration b17182b5-43cf-4749-8362-d1a3df183f00 [ 957.995249] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Successfully updated port: bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.017546] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1ae-1726-9801-e2bd-14f85a30f7c3, 'name': SearchDatastore_Task, 'duration_secs': 0.019592} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.021876] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2aa6fb6-f5f8-42fa-8959-57ce4cd12b09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.031032] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 958.031032] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d62b92-e803-b17f-61df-0832ab3066d8" [ 958.031032] env[68437]: _type = "Task" [ 958.031032] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.047195] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d62b92-e803-b17f-61df-0832ab3066d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.054214] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: aa39767e-1ae7-4881-b0a8-e7b66e1ceed2] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 958.309822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.310078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.346557] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d9c087ec-4c63-4567-bbc4-2b324fe7a91d tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.104s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.434990] env[68437]: DEBUG nova.compute.manager [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-plugged-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 958.435382] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.435623] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.435823] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.436066] env[68437]: DEBUG nova.compute.manager [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] No 
waiting events found dispatching network-vif-plugged-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.436211] env[68437]: WARNING nova.compute.manager [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received unexpected event network-vif-plugged-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c for instance with vm_state building and task_state spawning. [ 958.436419] env[68437]: DEBUG nova.compute.manager [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-changed-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 958.436624] env[68437]: DEBUG nova.compute.manager [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing instance network info cache due to event network-changed-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 958.436848] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.436994] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.437193] env[68437]: DEBUG nova.network.neutron [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing network info cache for port bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 958.503058] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 958.503165] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 958.503783] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.504933] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1cf767-ab08-49be-9aff-c20ca656303d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.518207] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 958.518207] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 958.518435] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a9e6d802-9b16-4351-a18b-2f2bba56ff5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.544764] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d62b92-e803-b17f-61df-0832ab3066d8, 'name': SearchDatastore_Task, 'duration_secs': 0.025889} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.547802] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.548124] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c/2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.549674] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b097c6ad-9f6a-4ec5-8e40-4498764470b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.558887] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 1841dcd4-c231-4c6d-aa2f-5ce3cbb2b530] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 958.561352] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 958.561352] env[68437]: value = "task-2944423" [ 958.561352] env[68437]: _type = "Task" [ 958.561352] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.575185] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.693800] env[68437]: DEBUG oslo_vmware.rw_handles [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521f5c40-a85e-7f37-3634-356c44bf7d10/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 958.694105] env[68437]: INFO nova.virt.vmwareapi.images [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Downloaded image file data 10f6eb30-f78d-487c-b50f-3e423a5ce5e1 [ 958.695067] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889e28a0-f5b0-43b4-a3f7-7b4de51dadee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.717261] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f20be1c6-dc06-4960-b7fd-87acc21f27af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.730330] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3265053-d2a8-4f7d-a2a9-72b73d1278d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.738847] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8374881f-fc31-4a9f-a265-4a85160a1177 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.744903] env[68437]: INFO nova.virt.vmwareapi.images [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] The imported VM was unregistered [ 958.748392] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 958.749334] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.781650] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a908982-4e5e-4d4d-8fda-f78b697a3c11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.784905] env[68437]: DEBUG nova.compute.manager [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.786019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b514bf9-e15f-4a56-bead-f62e19864bc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.790490] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e08552-b2da-4830-a4d2-c2ece3424941 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.804566] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41df6c28-e786-4873-b308-3e95def1968e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.823774] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 958.826835] env[68437]: DEBUG nova.compute.provider_tree [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.830115] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.830354] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b/OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b.vmdk to [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk. {{(pid=68437) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 958.830599] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7f5311f2-6bfb-483b-a1be-c4b2d9060832 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.838851] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 958.838851] env[68437]: value = "task-2944425" [ 958.838851] env[68437]: _type = "Task" [ 958.838851] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.852584] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.015044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "7422ff70-901c-4343-9b9f-f12c52348d2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.015307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "7422ff70-901c-4343-9b9f-f12c52348d2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.065126] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 67312d87-cc63-4dc7-b9c1-9c8d349a4756] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 959.078686] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518789} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.078686] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c/2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.078686] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.078686] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ba2e169-887d-4882-b136-fd3f518fbe4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.085010] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 959.085010] env[68437]: value = "task-2944426" [ 959.085010] env[68437]: _type = "Task" [ 959.085010] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.099542] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944426, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.104755] env[68437]: DEBUG nova.network.neutron [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 959.194969] env[68437]: DEBUG nova.network.neutron [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.305916] env[68437]: INFO nova.compute.manager [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] instance snapshotting [ 959.308940] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5d4550-e2e4-463c-af9a-39862981fa02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.333206] env[68437]: DEBUG nova.scheduler.client.report [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.339589] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af9ae31-3388-48c3-bf26-30222b8b0bcb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.354302] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.361017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.517906] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 959.571796] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: c74569b8-dfc9-4a74-9d25-74b484bd9477] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 959.598456] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944426, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079294} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.599042] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.600517] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb08160d-8df4-4b89-bacd-4106ecae081b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.627116] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c/2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.627954] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49094ccf-55b0-4606-8b5c-68e689845de3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.655033] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 959.655033] env[68437]: value = "task-2944427" [ 959.655033] env[68437]: _type = "Task" [ 959.655033] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.664784] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944427, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.696673] env[68437]: DEBUG oslo_concurrency.lockutils [req-614f3045-1d50-4dff-b677-a1f63503f8ff req-e3421dba-a53d-4966-baf5-e0f08fb92257 service nova] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.697113] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.697318] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 959.838272] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.838915] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.844108] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.862s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.845700] env[68437]: INFO nova.compute.claims [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.865719] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 959.866057] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.866313] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-30df37a6-fbeb-4e14-bb71-28e33533dd1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.877135] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 959.877135] env[68437]: value = "task-2944428" [ 959.877135] env[68437]: _type = "Task" [ 959.877135] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.892217] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944428, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.049143] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.075149] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ad773afa-fc0a-4380-901d-af013ce55f2b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 960.173021] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944427, 'name': ReconfigVM_Task, 'duration_secs': 0.351974} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.173021] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c/2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.173021] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f0c339a-b9c3-48d2-9d13-cfb958f74a39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.185398] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 960.185398] env[68437]: value = "task-2944429" [ 960.185398] env[68437]: _type = "Task" [ 960.185398] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.197782] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944429, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.228146] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.228453] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.228697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.228918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.229124] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.231576] env[68437]: INFO nova.compute.manager [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Terminating instance [ 960.273108] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 960.353865] env[68437]: DEBUG nova.compute.utils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 960.360866] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 960.361678] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 960.364336] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.391283] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944428, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.430281] env[68437]: DEBUG nova.network.neutron [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.448267] env[68437]: DEBUG nova.policy [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 
tempest-ImagesOneServerTestJSON-55035735-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f179fd55acfc4de4992158ef86155e8f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3520c17d0b3494f94b25778f952356a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.578454] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 07d98c5c-ede8-4001-93b2-1b1d83687ca1] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 960.701334] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944429, 'name': Rename_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.740901] env[68437]: DEBUG nova.compute.manager [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 960.740901] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.742695] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9aa4774-aa08-475d-b41e-523d48121a71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.751171] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.751432] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.758870] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powering off the VM {{(pid=68437) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.758870] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be403280-53be-4fe9-9976-e50a68d59070 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.770038] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 960.770038] env[68437]: value = "task-2944430" [ 960.770038] env[68437]: _type = "Task" [ 960.770038] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.782483] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944430, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.823805] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Successfully created port: 7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.861050] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 960.863895] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.894742] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944428, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.932719] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.933091] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance network_info: |[{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 960.934938] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:52:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.942749] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.946546] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.947465] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8e6e0d9-fdfb-4a07-b86b-1718429d598a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.977200] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.977200] env[68437]: value = "task-2944431" [ 960.977200] env[68437]: _type = "Task" [ 960.977200] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.993942] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944431, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.085389] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: d7c64aa1-44f8-44f4-9fb6-463033837736] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 961.200136] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944429, 'name': Rename_Task, 'duration_secs': 0.980092} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.203464] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.204760] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6df18c09-a127-4a6c-8120-9e44058927e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.214368] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 961.214368] env[68437]: value = "task-2944432" [ 961.214368] env[68437]: _type = "Task" [ 961.214368] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.229932] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.254340] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 961.286058] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944430, 'name': PowerOffVM_Task, 'duration_secs': 0.437267} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.286452] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.286753] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.287299] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85f780c1-bbb5-47b4-949c-3659b60b4b34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.316411] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63d0f88-d695-4d34-a0d7-6dc9f578bf72 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.326461] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08701a5-9bba-4da9-a9f5-3c88790bbe32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.364612] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8782e0-db6d-4814-8a25-a4cedbae19ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.379469] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.380936] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd234d5-b9ea-406d-af70-d01af22977fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.395937] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944428, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.421806] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.422057] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.422249] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore1] 6d877579-3095-4ee9-bb3e-4d5a9122f1ed {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.422853] env[68437]: DEBUG nova.compute.provider_tree [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.424364] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d3509fe-6f64-433f-bc20-667f834ca5c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.434654] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 961.434654] env[68437]: value = "task-2944434" [ 961.434654] env[68437]: _type = "Task" [ 961.434654] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.446829] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.488146] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944431, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.589011] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 5435b4d8-46c3-43e3-b11b-cbeb580e2f36] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 961.730769] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944432, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.777683] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.871104] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944425, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.611007} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.871104] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b/OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b.vmdk to [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk. [ 961.871104] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Cleaning up location [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 961.871104] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_fb6dcbbf-6833-466e-a347-30b9134ed68b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.871104] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c45e1a6-e0ac-47b2-99eb-eb5af13b340f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.873401] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.881106] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 961.881106] env[68437]: value = "task-2944435" [ 961.881106] env[68437]: _type = "Task" [ 961.881106] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.892472] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944428, 'name': CreateSnapshot_Task, 'duration_secs': 1.765385} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.895405] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 961.895682] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.897710] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.897932] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.898101] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.898287] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.898435] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.898581] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.898792] env[68437]: DEBUG nova.virt.hardware [None 
req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.898946] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.899128] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.899293] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.899465] env[68437]: DEBUG nova.virt.hardware [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.900214] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f7ce89-375e-4a7a-b592-5e4e22bad583 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.902993] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c639e7-afdf-44a8-ae11-4f1752e21904 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.915018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e60e8fa-ba18-4cd2-a224-ff7f8c1242f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.928583] env[68437]: DEBUG nova.scheduler.client.report [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.943760] env[68437]: DEBUG oslo_vmware.api [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168287} 
completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.943932] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.944297] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.944297] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.944465] env[68437]: INFO nova.compute.manager [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Took 1.20 seconds to destroy the instance on the hypervisor. [ 961.944688] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.945088] env[68437]: DEBUG nova.compute.manager [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.945189] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 961.988882] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944431, 'name': CreateVM_Task, 'duration_secs': 0.518672} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.989108] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.989785] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.990032] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.990318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.990570] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ade2d91a-4f32-4a20-ac3b-828d00b1f179 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.995414] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 961.995414] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52dc9cff-1b4a-2b1e-ca65-a40d294bf883" [ 961.995414] env[68437]: _type = "Task" [ 961.995414] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.004851] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52dc9cff-1b4a-2b1e-ca65-a40d294bf883, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.092615] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: f517b14c-320f-4a6e-ae74-f2335e22f7a4] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 962.204911] env[68437]: DEBUG nova.compute.manager [req-0f9eb758-bf3f-4eff-8c00-5d86db7849d0 req-1011ed4a-8476-4a14-aaec-23be17a825e5 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Received event network-vif-deleted-bba0a9a2-7033-420f-baf6-f59f37b8b8b8 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 962.205153] env[68437]: INFO nova.compute.manager [req-0f9eb758-bf3f-4eff-8c00-5d86db7849d0 req-1011ed4a-8476-4a14-aaec-23be17a825e5 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Neutron deleted interface bba0a9a2-7033-420f-baf6-f59f37b8b8b8; detaching it from the instance and deleting it from the info cache [ 962.205311] env[68437]: DEBUG nova.network.neutron [req-0f9eb758-bf3f-4eff-8c00-5d86db7849d0 req-1011ed4a-8476-4a14-aaec-23be17a825e5 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.226687] env[68437]: DEBUG oslo_vmware.api [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944432, 'name': PowerOnVM_Task, 'duration_secs': 0.607933} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.226950] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.227165] env[68437]: INFO nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 962.227348] env[68437]: DEBUG nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 962.228120] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a7d580-5744-49ce-a1c8-7ca5868fbb9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.252581] env[68437]: DEBUG nova.compute.manager [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Received event network-vif-plugged-7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 962.252840] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] Acquiring lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.253091] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.253228] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.253439] env[68437]: DEBUG nova.compute.manager [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] No waiting events found dispatching network-vif-plugged-7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 962.253582] env[68437]: WARNING nova.compute.manager [req-b9dd88f4-d9c3-4528-968b-2f7963eec0eb req-4c47fad7-1f36-439c-9407-dc842ee0da75 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Received unexpected event network-vif-plugged-7be66ccd-47fb-4b51-ac58-a60ae0578274 for instance with vm_state building and task_state spawning. 
[ 962.346325] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Successfully updated port: 7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.394366] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037899} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.394658] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.394831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.395090] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk to [datastore1] c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd/c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.395352] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d46a956-1f1f-4d40-8207-3ec438b90d34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.401376] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 962.401376] env[68437]: value = "task-2944436" [ 962.401376] env[68437]: _type = "Task" [ 962.401376] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.408823] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.421148] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 962.421872] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0f3866ae-4124-40da-b945-ab28dfcf9e61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.430015] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 962.430015] env[68437]: value = "task-2944437" [ 962.430015] env[68437]: _type = "Task" [ 962.430015] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.434388] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.434934] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 962.437725] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.230s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.438011] env[68437]: DEBUG nova.objects.instance [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lazy-loading 'resources' on Instance uuid aaa2a858-9cc0-4b5a-8729-80e5440b530d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.444676] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.504882] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52dc9cff-1b4a-2b1e-ca65-a40d294bf883, 'name': SearchDatastore_Task, 'duration_secs': 0.011949} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.505245] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.505485] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.505740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.505941] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.506154] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.506414] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f716828-6c9e-4893-b2db-219a4795b23d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.514834] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.515049] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.515939] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0cf8337-debe-4f0c-abd6-ef9b71f1c8ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.521991] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 962.521991] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1115b-6390-a0a7-98de-54a96a4bfb8b" [ 962.521991] env[68437]: _type = "Task" [ 962.521991] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.529729] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1115b-6390-a0a7-98de-54a96a4bfb8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.596359] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: a2d0200e-e38e-4ef7-8d6c-29e9ceeb397f] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 962.689918] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.708566] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6ff7f56-5b49-460d-a043-ed7bcd8007d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.718652] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e34e33-0095-427e-b785-ba0dd594f1c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.759157] env[68437]: DEBUG nova.compute.manager [req-0f9eb758-bf3f-4eff-8c00-5d86db7849d0 req-1011ed4a-8476-4a14-aaec-23be17a825e5 service nova] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Detach interface failed, port_id=bba0a9a2-7033-420f-baf6-f59f37b8b8b8, reason: Instance 6d877579-3095-4ee9-bb3e-4d5a9122f1ed could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 962.763722] env[68437]: INFO nova.compute.manager [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Took 49.84 seconds to build instance. 
[ 962.848623] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.848623] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquired lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.848820] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 962.912324] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.944483] env[68437]: DEBUG nova.compute.utils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 962.946266] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.946513] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 963.032709] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1115b-6390-a0a7-98de-54a96a4bfb8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009381} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.036320] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7bc7986-2822-448b-981e-7a8d4681cd09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.042017] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 963.042017] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52623f23-118c-261a-c983-416e0dc5e431" [ 963.042017] env[68437]: _type = "Task" [ 963.042017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.052054] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52623f23-118c-261a-c983-416e0dc5e431, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.101020] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 1537e626-f2ec-4b5d-bcba-50cd583dff31] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 963.196652] env[68437]: INFO nova.compute.manager [-] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Took 1.25 seconds to deallocate network for instance. 
[ 963.265822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38b9c58a-aaa3-489b-a01c-10851c74f763 tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.355s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.323104] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4671472-8949-4105-9fe8-cda30d3bc777 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.333018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebd73c3-8c96-4999-9c97-b685c92161c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.367059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9349a2e-42c9-49f8-9eb1-85d2d66476f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.375815] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501ef31e-4e8a-4252-9c3d-0a0959733506 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.391055] env[68437]: DEBUG nova.compute.provider_tree [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.399913] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 963.412164] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.441120] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.448956] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 963.553056] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52623f23-118c-261a-c983-416e0dc5e431, 'name': SearchDatastore_Task, 'duration_secs': 0.091677} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.553353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.553964] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3f82b137-81d5-4754-b222-3cefce0b2a10/3f82b137-81d5-4754-b222-3cefce0b2a10.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.553964] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4adfd4f-3cbb-499c-ae94-f4c95f8456df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.562223] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 963.562223] env[68437]: value = "task-2944438" [ 963.562223] env[68437]: _type = "Task" [ 963.562223] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.568377] env[68437]: DEBUG nova.network.neutron [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Updating instance_info_cache with network_info: [{"id": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "address": "fa:16:3e:9a:22:fb", "network": {"id": "b8785d6f-289d-43d2-8430-87415fdfa8ff", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2059834869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3520c17d0b3494f94b25778f952356a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be66ccd-47", "ovs_interfaceid": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.577199] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.602239] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0649ee2f-cd90-4597-b7c4-09f2acaf3f54] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 963.705757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.894814] env[68437]: DEBUG nova.scheduler.client.report [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.914713] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.942262] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.072919] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.074492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Releasing lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.074846] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Instance network_info: |[{"id": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "address": "fa:16:3e:9a:22:fb", "network": {"id": "b8785d6f-289d-43d2-8430-87415fdfa8ff", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2059834869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3520c17d0b3494f94b25778f952356a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be66ccd-47", "ovs_interfaceid": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 964.075228] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:22:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5c7821ea-f92f-4f06-a4cb-05e1186a9d22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7be66ccd-47fb-4b51-ac58-a60ae0578274', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.082814] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Creating folder: Project (c3520c17d0b3494f94b25778f952356a). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.083169] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23d72ad4-3e9c-48f5-98a5-df61ab533404 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.093518] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Created folder: Project (c3520c17d0b3494f94b25778f952356a) in parent group-v590848. [ 964.093745] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Creating folder: Instances. Parent ref: group-v591035. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.094009] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46b8eee3-82a3-4248-a784-20b2f15262ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.103689] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Created folder: Instances in parent group-v591035. [ 964.103960] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.104190] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.104408] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa6549d1-f421-4af0-ac5a-cec4881e6665 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.119385] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 5abc2c5a-2177-4d77-97ce-872808bb47ee] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 964.126354] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.126354] env[68437]: value = "task-2944441" [ 964.126354] env[68437]: _type = "Task" [ 964.126354] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.138975] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944441, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.333270] env[68437]: DEBUG nova.compute.manager [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Received event network-changed-7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 964.333569] env[68437]: DEBUG nova.compute.manager [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Refreshing instance network info cache due to event network-changed-7be66ccd-47fb-4b51-ac58-a60ae0578274. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 964.333731] env[68437]: DEBUG oslo_concurrency.lockutils [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] Acquiring lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.333846] env[68437]: DEBUG oslo_concurrency.lockutils [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] Acquired lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.334017] env[68437]: DEBUG nova.network.neutron [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Refreshing network info cache for port 7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 964.406286] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.409606] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.551s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.410016] env[68437]: DEBUG nova.objects.instance [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lazy-loading 'resources' on Instance uuid 4f46132c-155d-4def-b017-7fd84e37eed5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.424484] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.433831] env[68437]: INFO nova.scheduler.client.report [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Deleted allocations for instance aaa2a858-9cc0-4b5a-8729-80e5440b530d [ 964.450215] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.457845] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.484331] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.484612] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.484769] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 964.484954] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.485126] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 964.485548] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 
tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.485548] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.485817] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.485817] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.486064] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.486154] env[68437]: DEBUG nova.virt.hardware [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.487070] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05131dc3-b2f3-4329-a011-65d8dd0de2ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.496890] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa390e-f5f4-43a1-8597-96965bcc3a12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.513875] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.520113] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Creating folder: Project (e80e24aa4e8e41809d90df3adf9c2792). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.521347] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-607cfebd-db77-4582-a059-a6df7335868f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.532926] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Created folder: Project (e80e24aa4e8e41809d90df3adf9c2792) in parent group-v590848. [ 964.532926] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Creating folder: Instances. Parent ref: group-v591038. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.533151] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f2f5d739-b59a-4b78-893d-c15baaa9745a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.542962] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Created folder: Instances in parent group-v591038. [ 964.543324] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.543568] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.543822] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f2b6480-4a70-45b7-b913-4fa73c2f8d6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.561687] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.561687] env[68437]: value = "task-2944444" [ 964.561687] env[68437]: _type = "Task" [ 964.561687] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.573872] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944444, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.577264] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.622874] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ea330078-a8f2-41f4-a161-5d0e29ddfab5] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 964.639154] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944441, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.924596] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944436, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.391106} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.924853] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/10f6eb30-f78d-487c-b50f-3e423a5ce5e1/10f6eb30-f78d-487c-b50f-3e423a5ce5e1.vmdk to [datastore1] c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd/c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.925766] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54cca83-c8ed-456e-9a9e-635df2807fec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.949819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6a39152-939b-467d-9939-3762ced57814 tempest-AttachInterfacesV270Test-1664810740 tempest-AttachInterfacesV270Test-1664810740-project-member] Lock "aaa2a858-9cc0-4b5a-8729-80e5440b530d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.202s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.960851] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd/c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.972165] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78d92171-455e-4ecd-ac02-f73744acdf94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.990133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.990133] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.990133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.990133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.990133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.992337] env[68437]: INFO nova.compute.manager [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Terminating instance [ 965.007296] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 965.007296] env[68437]: value = "task-2944445" [ 965.007296] env[68437]: _type = "Task" [ 965.007296] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.011810] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.027463] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944445, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.086107] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944438, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.093295] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944444, 'name': CreateVM_Task, 'duration_secs': 0.431063} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.093745] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.094245] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.094461] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.094829] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.095109] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5cb031e-c827-4223-9778-b24d60d32f25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.101617] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 965.101617] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52966545-2f06-4e91-3943-eb5364f751de" [ 965.101617] env[68437]: _type = "Task" [ 965.101617] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.113742] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52966545-2f06-4e91-3943-eb5364f751de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.126518] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ce8fd88b-249b-4fee-80fc-35b795d24658] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 965.146328] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944441, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.277414] env[68437]: DEBUG nova.network.neutron [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Updated VIF entry in instance network info cache for port 7be66ccd-47fb-4b51-ac58-a60ae0578274. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 965.277828] env[68437]: DEBUG nova.network.neutron [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Updating instance_info_cache with network_info: [{"id": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "address": "fa:16:3e:9a:22:fb", "network": {"id": "b8785d6f-289d-43d2-8430-87415fdfa8ff", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2059834869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c3520c17d0b3494f94b25778f952356a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c7821ea-f92f-4f06-a4cb-05e1186a9d22", "external-id": "nsx-vlan-transportzone-69", "segmentation_id": 69, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7be66ccd-47", "ovs_interfaceid": "7be66ccd-47fb-4b51-ac58-a60ae0578274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.385812] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d38a753-61f8-478f-915d-032139b91d4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.393595] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5943e86b-bd03-4ae0-ac26-1cf48948829f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.425754] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd240df-ed08-459e-a4d8-9f00b57fc001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.433870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5047876d-4974-4b64-8452-a201accf2d04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.448858] env[68437]: DEBUG nova.compute.provider_tree [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.460148] env[68437]: DEBUG oslo_vmware.api [None 
req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944437, 'name': CloneVM_Task, 'duration_secs': 2.724549} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.461178] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Created linked-clone VM from snapshot [ 965.461981] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d433c538-0137-4ac7-9999-8cd028864bd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.472292] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Uploading image 2f46a171-976e-486e-806f-88cae57dee64 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 965.494221] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 965.494221] env[68437]: value = "vm-591034" [ 965.494221] env[68437]: _type = "VirtualMachine" [ 965.494221] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 965.494505] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e3daa901-f1db-484d-8cac-a4fa9c1a469d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.496946] env[68437]: DEBUG nova.compute.manager [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.497165] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.498165] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcb9b22-44be-48ae-8510-795b4e4bcd2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.502300] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease: (returnval){ [ 965.502300] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b03f6a-8093-9265-8fa1-be63017dabb6" [ 965.502300] env[68437]: _type = "HttpNfcLease" [ 965.502300] env[68437]: } obtained for exporting VM: (result){ [ 965.502300] env[68437]: value = "vm-591034" [ 965.502300] env[68437]: _type = "VirtualMachine" [ 965.502300] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 965.502585] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the lease: (returnval){ [ 965.502585] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b03f6a-8093-9265-8fa1-be63017dabb6" [ 965.502585] env[68437]: _type = "HttpNfcLease" [ 965.502585] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 965.508746] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.509477] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04cd3c48-70c9-41b1-ae05-9f6af58bc309 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.516027] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 965.516027] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b03f6a-8093-9265-8fa1-be63017dabb6" [ 965.516027] env[68437]: _type = "HttpNfcLease" [ 965.516027] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 965.520687] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944445, 'name': ReconfigVM_Task, 'duration_secs': 0.477272} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.521729] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd/c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 965.522448] env[68437]: DEBUG oslo_vmware.api [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 965.522448] env[68437]: value = "task-2944447" [ 965.522448] env[68437]: _type = "Task" [ 965.522448] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.522688] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbd1652d-f344-4437-a691-a22b2c9132f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.532186] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 965.532186] env[68437]: value = "task-2944448" [ 965.532186] env[68437]: _type = "Task" [ 965.532186] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.544927] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944448, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.580252] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944438, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.619373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.580252] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3f82b137-81d5-4754-b222-3cefce0b2a10/3f82b137-81d5-4754-b222-3cefce0b2a10.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.580252] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.580252] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fe46a07-232f-428f-8fc8-1d946b482bcd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.586434] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 965.586434] env[68437]: value = "task-2944449" [ 965.586434] env[68437]: _type = "Task" [ 965.586434] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.594330] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.612513] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52966545-2f06-4e91-3943-eb5364f751de, 'name': SearchDatastore_Task, 'duration_secs': 0.059475} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.612881] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.613249] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.613562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.613778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.614027] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.614687] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72462eed-c042-43e7-aa5b-628f7a504c2b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.630329] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.630723] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.634641] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b717762-aa1f-41ce-a28e-84f9d81af35e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.641130] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 27c18765-38cf-41d6-9139-9acffa94fbe6] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 965.646553] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 965.646553] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e3e708-1b97-8405-007c-1b79867ce234" [ 965.646553] env[68437]: _type = "Task" [ 965.646553] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.652324] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944441, 'name': CreateVM_Task, 'duration_secs': 1.087994} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.654473] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.655227] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.655432] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.655749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.656507] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1eeecf-f385-409f-8143-5ef9bf07994d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.662334] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e3e708-1b97-8405-007c-1b79867ce234, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.665719] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 965.665719] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52907589-fc5d-cab5-e070-54abe14c6f71" [ 965.665719] env[68437]: _type = "Task" [ 965.665719] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.674032] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52907589-fc5d-cab5-e070-54abe14c6f71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.780055] env[68437]: DEBUG oslo_concurrency.lockutils [req-6fe6ccd9-7cd2-4a1d-968e-1fa62ab5a421 req-93c5e49d-b869-4916-b024-a93fd84b93f6 service nova] Releasing lock "refresh_cache-3a2dad52-63d3-46ec-ac43-3922bca3919e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.954805] env[68437]: DEBUG nova.scheduler.client.report [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.011484] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 966.011484] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b03f6a-8093-9265-8fa1-be63017dabb6" [ 966.011484] env[68437]: _type = "HttpNfcLease" [ 966.011484] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 966.011822] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 966.011822] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b03f6a-8093-9265-8fa1-be63017dabb6" [ 966.011822] env[68437]: _type = "HttpNfcLease" [ 966.011822] env[68437]: }. 
{{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 966.012650] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd75bfa8-a6bc-4944-ae30-39047348c43d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.021333] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 966.021586] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 966.099711] env[68437]: DEBUG oslo_vmware.api [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944447, 'name': PowerOffVM_Task, 'duration_secs': 0.234712} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.100882] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.101369] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 966.101514] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c67cbe6f-5664-4202-bd15-2866ed929966 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.108251] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077796} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.108477] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944448, 'name': Rename_Task, 'duration_secs': 0.546485} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.109044] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.109375] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.110104] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326f4b5e-8004-4a37-a693-51573f692902 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.112363] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8c19111-26a6-4b6d-adb1-23865f0d56a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.132892] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 3f82b137-81d5-4754-b222-3cefce0b2a10/3f82b137-81d5-4754-b222-3cefce0b2a10.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.136275] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5293bfd-9b0d-47ab-aa21-1e83aa65a46d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.150949] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 966.150949] env[68437]: value = "task-2944451" [ 966.150949] env[68437]: _type = "Task" [ 966.150949] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.151914] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 45595615-59c0-4c59-b18c-b49a3126dbb7] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 966.153699] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c3f46ee4-f1b3-4aad-a6fa-9fe7dfdec0fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.163863] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 966.163863] env[68437]: value = "task-2944452" [ 966.163863] env[68437]: _type = "Task" [ 966.163863] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.181189] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944451, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.194744] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e3e708-1b97-8405-007c-1b79867ce234, 'name': SearchDatastore_Task, 'duration_secs': 0.017325} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.196540] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8737c6f-9a8b-4ba0-9399-bd1fafc42b10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.202788] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52907589-fc5d-cab5-e070-54abe14c6f71, 'name': SearchDatastore_Task, 'duration_secs': 0.064421} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.208354] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.208663] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.208895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.210811] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 966.210811] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52aee99e-7425-a0c9-aec9-5d4505f7c3aa" [ 966.210811] env[68437]: _type = "Task" [ 966.210811] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.218392] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52aee99e-7425-a0c9-aec9-5d4505f7c3aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.461835] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.465487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.862s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.466736] env[68437]: DEBUG nova.objects.instance [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lazy-loading 'resources' on Instance uuid 013a92cc-0fc2-4e85-aee6-efb62bae4dcb {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.495724] env[68437]: INFO nova.scheduler.client.report [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Deleted allocations for instance 4f46132c-155d-4def-b017-7fd84e37eed5 [ 966.591352] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.592019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.592419] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Deleting the datastore file [datastore1] 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.592993] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ffc1f6c-066f-45a6-a07c-c99a6f8aa24c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.600766] env[68437]: DEBUG oslo_vmware.api [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c 
tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for the task: (returnval){ [ 966.600766] env[68437]: value = "task-2944453" [ 966.600766] env[68437]: _type = "Task" [ 966.600766] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.612077] env[68437]: DEBUG oslo_vmware.api [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.658931] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ddaeb82e-bbf7-430a-bcc8-5b1f70b4d236] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 966.680817] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944452, 'name': ReconfigVM_Task, 'duration_secs': 0.303774} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.681124] env[68437]: DEBUG oslo_vmware.api [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944451, 'name': PowerOnVM_Task, 'duration_secs': 0.531486} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.681567] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 3f82b137-81d5-4754-b222-3cefce0b2a10/3f82b137-81d5-4754-b222-3cefce0b2a10.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.682298] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.682850] env[68437]: INFO nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Took 15.16 seconds to spawn the instance on the hypervisor. 
[ 966.683122] env[68437]: DEBUG nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 966.683401] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38337237-6193-449d-9725-8cfa3c3686cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.685834] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8195034c-b415-4643-9778-cf158cb20681 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.700267] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 966.700267] env[68437]: value = "task-2944454" [ 966.700267] env[68437]: _type = "Task" [ 966.700267] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.719031] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944454, 'name': Rename_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.725255] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52aee99e-7425-a0c9-aec9-5d4505f7c3aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010789} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.725701] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.726149] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.726579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.726866] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.727174] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe0b4598-f07f-4122-a4b5-3154d89e3fec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.730342] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-261db367-2f16-4e13-828d-07548b24a534 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.737825] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 966.737825] env[68437]: value = "task-2944455" [ 966.737825] env[68437]: _type = "Task" [ 966.737825] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.744030] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.744030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.745087] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a9e23a1-aea3-4604-8b84-c20c4a1006c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.751571] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.756308] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 966.756308] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5268af39-3c0e-f38a-386b-5f9c4fe6fde4" [ 966.756308] env[68437]: _type = "Task" [ 966.756308] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.766955] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5268af39-3c0e-f38a-386b-5f9c4fe6fde4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.008249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a2f794-a7fd-44ca-b27f-724b09ed148a tempest-ImagesOneServerNegativeTestJSON-1820869063 tempest-ImagesOneServerNegativeTestJSON-1820869063-project-member] Lock "4f46132c-155d-4def-b017-7fd84e37eed5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.727s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.121946] env[68437]: DEBUG oslo_vmware.api [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Task: {'id': task-2944453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198537} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.122427] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.122942] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.123208] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.123361] env[68437]: INFO nova.compute.manager [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Took 1.63 seconds to destroy the instance on the hypervisor. [ 967.123998] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.127546] env[68437]: DEBUG nova.compute.manager [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 967.128884] env[68437]: DEBUG nova.network.neutron [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 967.164696] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 180f77ab-e468-410d-8e41-20291487ef5d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 967.220543] env[68437]: INFO nova.compute.manager [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Took 54.33 seconds to build instance. [ 967.230149] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944454, 'name': Rename_Task, 'duration_secs': 0.153343} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.230516] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.230683] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65f4ddfb-26df-4062-b1f5-a35644b52b64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.244284] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 967.244284] env[68437]: value = "task-2944456" [ 967.244284] env[68437]: _type = "Task" [ 967.244284] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.255245] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944455, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.269141] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944456, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.276888] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5268af39-3c0e-f38a-386b-5f9c4fe6fde4, 'name': SearchDatastore_Task, 'duration_secs': 0.019079} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.281802] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b69914d4-e0ee-4e9d-a556-fdbb8a813699 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.288335] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 967.288335] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52616597-df64-bb25-84ef-376488ce06a5" [ 967.288335] env[68437]: _type = "Task" [ 967.288335] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.301506] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52616597-df64-bb25-84ef-376488ce06a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.473819] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6253db-4826-4994-b214-b12a74ea95ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.486047] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d25cee7-4d58-4161-962e-431787e360a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.524642] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f91c5b-0622-4cf9-aacb-2ad075265a9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.539439] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79c4180-99ca-41bb-a680-51f8fb46902b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.556657] env[68437]: DEBUG nova.compute.provider_tree [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.670249] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0484ccee-f003-4101-87c5-fed92f095d2d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 967.724526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-987d8054-ca10-43f3-b662-4a0914914772 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.846s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.755244] env[68437]: DEBUG nova.compute.manager [req-4e9e7965-3c3d-496c-8051-a9270c3181a2 req-3ce7b88a-cf2c-462b-8ef1-5e34e567c2a1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Received event network-vif-deleted-ea0db909-110c-4015-9c31-5b6aae262d4b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 967.755533] env[68437]: INFO nova.compute.manager [req-4e9e7965-3c3d-496c-8051-a9270c3181a2 req-3ce7b88a-cf2c-462b-8ef1-5e34e567c2a1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Neutron deleted interface ea0db909-110c-4015-9c31-5b6aae262d4b; detaching it from the instance and deleting it from the info cache [ 967.755779] env[68437]: DEBUG nova.network.neutron [req-4e9e7965-3c3d-496c-8051-a9270c3181a2 req-3ce7b88a-cf2c-462b-8ef1-5e34e567c2a1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.757314] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': 
task-2944455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557628} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.758442] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.758656] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.759040] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d3203f2-4246-4fea-b598-9fa7c2236d34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.767925] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944456, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.773811] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 967.773811] env[68437]: value = "task-2944457" [ 967.773811] env[68437]: _type = "Task" [ 967.773811] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.782940] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944457, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.802385] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52616597-df64-bb25-84ef-376488ce06a5, 'name': SearchDatastore_Task, 'duration_secs': 0.01847} completed successfully. 
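The CopyVirtualDisk/ExtendVirtualDisk entries above copy the cached image VMDK from devstack-image-cache_base into a per-instance directory and then grow the root disk to 1048576 KB, i.e. 1 GiB. A small sketch of that path and size bookkeeping using plain string formatting; the datastore name and UUIDs are taken from the log, the helper names are illustrative, not Nova's own:

KIB_PER_GIB = 1024 * 1024  # ExtendVirtualDisk sizes in the log are expressed in KB

def cached_image_path(datastore, image_id):
    # "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_root_disk_path(datastore, instance_uuid):
    # "[datastore1] <uuid>/<uuid>.vmdk"
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

def root_gb_to_extend_kb(root_gb):
    # 1 GiB root disk -> 1048576 KB, matching the entry above
    return root_gb * KIB_PER_GIB

src = cached_image_path("datastore1", "a272f526-6b8d-4a29-bd06-cd29ab5fabbe")
dst = instance_root_disk_path("datastore1", "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5")
assert root_gb_to_extend_kb(1) == 1048576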
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.803031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.803302] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3a2dad52-63d3-46ec-ac43-3922bca3919e/3a2dad52-63d3-46ec-ac43-3922bca3919e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.803741] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc713d7c-a72e-45a8-b2a2-107f8472b367 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.810447] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 967.810447] env[68437]: value = "task-2944458" [ 967.810447] env[68437]: _type = "Task" [ 967.810447] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.820692] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944458, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.034302] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.034580] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.034801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.035034] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.035221] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.038239] env[68437]: INFO nova.compute.manager [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Terminating instance [ 968.060042] env[68437]: DEBUG nova.scheduler.client.report [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.094979] env[68437]: DEBUG nova.network.neutron [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 968.173728] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: df3fbf16-d3d9-4138-b563-6ea09dd233b8] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 968.255473] env[68437]: DEBUG oslo_vmware.api [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944456, 'name': PowerOnVM_Task, 'duration_secs': 0.577868} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.255760] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.255981] env[68437]: INFO nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Took 11.55 seconds to spawn the instance on the hypervisor. [ 968.256194] env[68437]: DEBUG nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.257032] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e22c9ba-9fba-4631-a2ef-240f705a3c73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.259861] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbc042a8-edd6-464a-8316-562f499b47d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.273957] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ab9ac0-b7bb-4f0c-82bc-008d17c545e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.298455] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944457, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167982} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.298651] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.299507] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e5f546-8472-4f0c-99ce-9a5bf2939ab4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.318584] env[68437]: DEBUG nova.compute.manager [req-4e9e7965-3c3d-496c-8051-a9270c3181a2 req-3ce7b88a-cf2c-462b-8ef1-5e34e567c2a1 service nova] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Detach interface failed, port_id=ea0db909-110c-4015-9c31-5b6aae262d4b, reason: Instance 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 968.341242] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.345809] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54c0c9df-d0d0-44fb-8d94-0a7b44036414 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.361151] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944458, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.368351] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 968.368351] env[68437]: value = "task-2944459" [ 968.368351] env[68437]: _type = "Task" [ 968.368351] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.377179] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944459, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.541483] env[68437]: DEBUG nova.compute.manager [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.541799] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.542837] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771e1e74-d89c-4806-b1a7-c18f9d68962f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.553628] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.553991] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38c47238-0306-4ea3-bfa7-61539287aac7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.562871] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 968.562871] env[68437]: value = "task-2944460" [ 968.562871] env[68437]: _type = "Task" [ 968.562871] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.567524] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.570647] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.925s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.571019] env[68437]: DEBUG nova.objects.instance [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lazy-loading 'resources' on Instance uuid b7706bf2-936f-439c-8e9f-b2241d0c211c {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.578327] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944460, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.597425] env[68437]: INFO nova.scheduler.client.report [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted allocations for instance 013a92cc-0fc2-4e85-aee6-efb62bae4dcb [ 968.609018] env[68437]: INFO nova.compute.manager [-] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Took 1.48 seconds to deallocate network for instance. [ 968.676882] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.677119] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances with incomplete migration {{(pid=68437) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 968.785808] env[68437]: INFO nova.compute.manager [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Took 49.43 seconds to build instance. [ 968.829321] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944458, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.836544} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.829883] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3a2dad52-63d3-46ec-ac43-3922bca3919e/3a2dad52-63d3-46ec-ac43-3922bca3919e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.830047] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.830879] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae86f10d-742c-454d-9536-91c3019b4855 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.839509] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 968.839509] env[68437]: value = "task-2944461" [ 968.839509] env[68437]: _type = "Task" [ 968.839509] env[68437]: } to complete. 
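The periodic-task entries above (_run_pending_deletes with its "0 of 5 cleanup attempts", _cleanup_incomplete_migrations, _cleanup_expired_console_auth_tokens) are driven by oslo.service's periodic task machinery. A minimal sketch of how such a task is typically declared with oslo_service.periodic_task; the class name, spacing and task body here are illustrative assumptions, not Nova's actual configuration:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class CleanupTasks(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=600)
    def _cleanup_example(self, context):
        # Placeholder body; a real task would e.g. retry pending instance
        # deletes and give up after a fixed number of attempts.
        pass

# The hosting service calls run_periodic_tasks(context) on a timer; a task
# whose interval is negative is skipped, as the startup log in this run notes.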
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.855902] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944461, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.879248] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944459, 'name': ReconfigVM_Task, 'duration_secs': 0.469542} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.879547] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.880198] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3d2bb23-f82c-4ac4-8525-8ebe73d6a7e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.887375] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 968.887375] env[68437]: value = "task-2944462" [ 968.887375] env[68437]: _type = "Task" [ 968.887375] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.896683] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944462, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.082214] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944460, 'name': PowerOffVM_Task, 'duration_secs': 0.253532} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.084220] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.084220] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.084220] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56755da8-f5de-4f24-8799-f71cc888184b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.112380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b71cbca6-bf17-4fac-af43-a2f8c61b5c69 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "013a92cc-0fc2-4e85-aee6-efb62bae4dcb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.947s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.120363] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.152570] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.152864] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.153087] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore1] c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.153384] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0c5338a-b8a2-4451-a6e5-67a1fcc4c750 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.161246] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 969.161246] env[68437]: value = "task-2944464" [ 969.161246] env[68437]: _type = "Task" [ 
969.161246] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.177446] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.181472] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.286308] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a1ad8bfe-b85d-48c4-9ffc-bf4349034415 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.940s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.305323] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "d5db3112-88c7-43af-a434-b91ca69f8559" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.305654] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.305920] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.306159] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.306378] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.311937] env[68437]: INFO nova.compute.manager [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Terminating instance [ 969.352283] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086179} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.352652] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.353680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af9951f-e6c6-454c-8ced-4b80ac71a0e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.383253] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 3a2dad52-63d3-46ec-ac43-3922bca3919e/3a2dad52-63d3-46ec-ac43-3922bca3919e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.387019] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26d126c5-a444-4d95-8323-b49937d634ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.414490] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944462, 'name': Rename_Task, 'duration_secs': 0.209616} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.419825] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.420765] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 969.420765] env[68437]: value = "task-2944465" [ 969.420765] env[68437]: _type = "Task" [ 969.420765] env[68437]: } to complete. 
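The lock trace above (acquire "<uuid>" for do_terminate_instance, then the shorter-lived "<uuid>-events" lock for _clear_events, each reporting waited/held times on release) is the standard oslo.concurrency pattern. A minimal sketch with in-process locks named after the UUID from the log; the function bodies are placeholders, not Nova code:

from oslo_concurrency import lockutils

instance_uuid = "d5db3112-88c7-43af-a434-b91ca69f8559"  # value taken from the log

def clear_events(uuid):
    # Inner lock guarding the per-instance external-event bookkeeping.
    with lockutils.lock(f"{uuid}-events"):
        pass  # drop queued events for this instance

def do_terminate_instance(uuid):
    # Outer lock serializes lifecycle operations on one instance.
    with lockutils.lock(uuid):
        clear_events(uuid)
        # ... power off, unregister, delete files, deallocate network ...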
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.422010] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99c47e6e-eeaa-447e-9fbb-a96645e42d96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.430187] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 969.430187] env[68437]: value = "task-2944466" [ 969.430187] env[68437]: _type = "Task" [ 969.430187] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.434256] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944465, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.447931] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944466, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.550031] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c9ae31-b0ef-4fb0-a0fc-e6e51445c100 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.558228] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb324655-08e4-430b-a8d7-680475385b2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.591847] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030e7133-6b1e-4224-81dc-df451c0bd973 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.600108] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82e297f-8036-4a98-a5cf-8c13030380d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.614735] env[68437]: DEBUG nova.compute.provider_tree [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.675546] env[68437]: DEBUG oslo_vmware.api [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181433} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.675862] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.676100] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.676362] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.676711] env[68437]: INFO nova.compute.manager [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 969.676992] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.677374] env[68437]: DEBUG nova.compute.manager [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.677479] env[68437]: DEBUG nova.network.neutron [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 969.816008] env[68437]: DEBUG nova.compute.manager [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Start destroying the instance on the hypervisor. 
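The teardown above follows a fixed order: power off the VM, unregister it, delete its datastore directory, then deallocate its Neutron ports. A compact sketch of that ordering with each step stubbed out; the parameter names are illustrative, not Nova's internal API:

def destroy_instance(power_off, unregister, delete_files, deallocate_network):
    """Run the teardown steps in the order the log shows; a failure in an
    earlier step should stop the later, irreversible ones."""
    power_off()            # VirtualMachine.PowerOffVM_Task
    unregister()           # VirtualMachine.UnregisterVM
    delete_files()         # FileManager.DeleteDatastoreFile_Task on the instance dir
    deallocate_network()   # neutron port cleanup / deallocate_for_instance()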
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 969.816340] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.817464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d54bea-7702-4b56-a7df-4cf165558b82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.835085] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 969.835085] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f807cc33-c180-44c8-a2f3-4279ec091162 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.843019] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 969.843019] env[68437]: value = "task-2944467" [ 969.843019] env[68437]: _type = "Task" [ 969.843019] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.851674] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.934507] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944465, 'name': ReconfigVM_Task, 'duration_secs': 0.459795} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.938224] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 3a2dad52-63d3-46ec-ac43-3922bca3919e/3a2dad52-63d3-46ec-ac43-3922bca3919e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.938926] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a073deb-ef2a-4c7f-b29f-b4d8547a3369 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.946392] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.947628] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 969.947628] env[68437]: value = "task-2944468" [ 969.947628] env[68437]: _type = "Task" [ 969.947628] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.963686] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944468, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.118668] env[68437]: DEBUG nova.scheduler.client.report [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.159775] env[68437]: DEBUG nova.compute.manager [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-changed-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 970.159980] env[68437]: DEBUG nova.compute.manager [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing instance network info cache due to event network-changed-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c. 
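The inventory report above is what the resource tracker sends to Placement; effective capacity is commonly read as (total - reserved) * allocation_ratio, e.g. (48 - 0) * 4.0 = 192 VCPU and (196590 - 512) * 1.0 = 196078 MB. A small worked check against the numbers in the log:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # capacity the scheduler can place against: (total - reserved) * ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}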
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 970.160446] env[68437]: DEBUG oslo_concurrency.lockutils [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.160678] env[68437]: DEBUG oslo_concurrency.lockutils [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.160883] env[68437]: DEBUG nova.network.neutron [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing network info cache for port bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 970.181741] env[68437]: DEBUG nova.compute.manager [req-61aa96fa-a0ba-4190-af10-674da371f385 req-5f1d6c40-e1e6-4a5a-a27e-1667ecc7247b service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Received event network-vif-deleted-12fcc1b5-700b-4949-9981-774da9ed33b0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 970.182539] env[68437]: INFO nova.compute.manager [req-61aa96fa-a0ba-4190-af10-674da371f385 req-5f1d6c40-e1e6-4a5a-a27e-1667ecc7247b service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Neutron deleted interface 12fcc1b5-700b-4949-9981-774da9ed33b0; detaching it from the instance and deleting it from the info cache [ 970.183035] env[68437]: DEBUG nova.network.neutron [req-61aa96fa-a0ba-4190-af10-674da371f385 req-5f1d6c40-e1e6-4a5a-a27e-1667ecc7247b service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.353856] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944467, 'name': PowerOffVM_Task, 'duration_secs': 0.241486} completed successfully. 
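The req-* entries above show Neutron notifying nova-compute of external instance events: network-vif-deleted removes the port from the instance's cached network info (and logs "could not be found" if the instance is already gone), while network-changed triggers a refresh of the network info cache. A much-simplified dispatcher sketch; the event names come from the log, but the handler bodies are placeholders, not Nova code:

def handle_external_event(event_name, port_id, info_cache):
    """Dispatch a Neutron external instance event the way the log describes."""
    if event_name == 'network-vif-deleted':
        # Drop the deleted port from the cached network info list.
        info_cache[:] = [vif for vif in info_cache if vif.get('id') != port_id]
    elif event_name == 'network-changed':
        # A port changed: re-query Neutron and rebuild the cache entry.
        refresh_network_info_cache(port_id)

def refresh_network_info_cache(port_id):
    pass  # placeholder for the Neutron lookup that repopulates the cache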
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.354208] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.354393] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.354587] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-826079e0-0d16-419b-a96b-9d70f2736b90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.419772] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.419868] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.420079] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore2] d5db3112-88c7-43af-a434-b91ca69f8559 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.420357] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d80b20a1-86f1-4b7e-a10f-353b37de7030 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.432515] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 970.432515] env[68437]: value = "task-2944470" [ 970.432515] env[68437]: _type = "Task" [ 970.432515] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.446114] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944470, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.456859] env[68437]: DEBUG oslo_vmware.api [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944466, 'name': PowerOnVM_Task, 'duration_secs': 0.71054} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.460324] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 970.460573] env[68437]: INFO nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Took 6.00 seconds to spawn the instance on the hypervisor. [ 970.460774] env[68437]: DEBUG nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 970.461520] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944468, 'name': Rename_Task, 'duration_secs': 0.182883} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.462639] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcedd00-d49b-4343-b2e0-61adee8081c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.465434] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.465718] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0747e5b2-a6ea-4389-a624-983057e49b1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.477279] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 970.477279] env[68437]: value = "task-2944471" [ 970.477279] env[68437]: _type = "Task" [ 970.477279] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.485610] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944471, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.532393] env[68437]: DEBUG nova.network.neutron [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.624706] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.627564] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.867s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.627725] env[68437]: DEBUG nova.objects.instance [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lazy-loading 'resources' on Instance uuid cf394b0b-cb14-4ae1-81bb-622c951bfdab {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.655986] env[68437]: INFO nova.scheduler.client.report [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleted allocations for instance b7706bf2-936f-439c-8e9f-b2241d0c211c [ 970.692085] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09b549d1-0ad3-4f15-be65-1c2de6d9e487 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.702286] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebd61bb-e507-4b73-b1de-156071fdce3b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.748297] env[68437]: DEBUG nova.compute.manager [req-61aa96fa-a0ba-4190-af10-674da371f385 req-5f1d6c40-e1e6-4a5a-a27e-1667ecc7247b service nova] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Detach interface failed, port_id=12fcc1b5-700b-4949-9981-774da9ed33b0, reason: Instance c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 970.922421] env[68437]: DEBUG nova.network.neutron [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updated VIF entry in instance network info cache for port bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 970.923049] env[68437]: DEBUG nova.network.neutron [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.943629] env[68437]: DEBUG oslo_vmware.api [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216913} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.943978] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.944189] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.944374] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.944548] env[68437]: INFO nova.compute.manager [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Took 1.13 seconds to destroy the instance on the hypervisor. 
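The destroy sequence traced above (PowerOffVM_Task, UnregisterVM, then a DeleteDatastoreFile_Task) is driven through oslo.vmware's session layer: each vSphere call that returns a server-side task is followed by wait_for_task(), whose polling produces the "Waiting for the task", "progress is N%" and "completed successfully" entries. A minimal sketch of that pattern follows; the vCenter address, credentials and the way vm_ref is obtained are placeholders, not values from this log.

    # Minimal sketch of the oslo.vmware call/poll pattern behind the task
    # entries above. Connection details and vm_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # VirtualMachine managed-object ref, resolved elsewhere
                  # (e.g. via a PropertyCollector query, as in the log)

    # *_Task methods return a task moref immediately; wait_for_task() polls
    # it until it reaches "success" or raises on "error".
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so no task is polled afterwards.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)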
[ 970.944791] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.945070] env[68437]: DEBUG nova.compute.manager [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 970.945552] env[68437]: DEBUG nova.network.neutron [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 970.989079] env[68437]: INFO nova.compute.manager [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Took 22.03 seconds to build instance. [ 970.997050] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944471, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.038519] env[68437]: INFO nova.compute.manager [-] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Took 1.36 seconds to deallocate network for instance. [ 971.166638] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1bddd9a9-4505-41a9-935c-309f9a4fa666 tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "b7706bf2-936f-439c-8e9f-b2241d0c211c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.060s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.426368] env[68437]: DEBUG oslo_concurrency.lockutils [req-477e1dde-23e6-4b05-9d13-9ae809985225 req-f6e667e2-ef4f-4d4e-ac13-89f96a0e2018 service nova] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.486622] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106a68f8-c27f-40ea-9ddc-5da29023c13a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.491640] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c3d7edba-2ae1-4e46-8186-9f53b5414a20 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.559s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.495211] env[68437]: DEBUG oslo_vmware.api [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944471, 'name': PowerOnVM_Task, 'duration_secs': 0.568018} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.497279] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.497524] env[68437]: INFO nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 9.62 seconds to spawn the instance on the hypervisor. [ 971.497718] env[68437]: DEBUG nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.498580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67bc8d2-9c2a-41c6-91c6-407224e8b718 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.501980] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2beca06d-5a55-4407-b6e2-fbb513ae666d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.539297] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b043665b-389f-49b8-a9a7-1ae51e1382b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.548514] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.550494] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aa5f28-ad7f-4042-9774-e6ce650fbdfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.566600] env[68437]: DEBUG nova.compute.provider_tree [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.785273] env[68437]: DEBUG nova.network.neutron [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.048418] env[68437]: INFO nova.compute.manager [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 23.41 seconds to build instance. 
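The "Acquiring lock "compute_resources"", "acquired ... waited N.NNNs" and ""released" ... held N.NNNs" entries around the resource-tracker calls come from oslo.concurrency's lockutils tracing: the tracker serializes claims and usage updates on one named internal lock, and each waiter is logged with how long it queued. A small sketch of the pattern, with update_usage() as a hypothetical stand-in rather than Nova's actual method:

    # Sketch of the oslo.concurrency locking that produces the
    # "compute_resources" lock tracing above; update_usage() is a
    # hypothetical stand-in, not Nova's implementation.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Only one caller at a time runs here; everyone else shows up in
        # the log as "waited N.NNNs" before acquiring the lock.
        pass

    # The same internal lock is also available as a context manager:
    with lockutils.lock('compute_resources'):
        pass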
[ 972.069146] env[68437]: DEBUG nova.scheduler.client.report [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.225928] env[68437]: INFO nova.compute.manager [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Rebuilding instance [ 972.278863] env[68437]: DEBUG nova.compute.manager [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.280389] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6bc8a5-f8c2-4138-a5b1-16c10bb3d6bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.287641] env[68437]: INFO nova.compute.manager [-] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Took 1.34 seconds to deallocate network for instance. 
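The inventory payload logged above is what the report client would push to Placement if it had changed; per resource class the schedulable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check against the numbers shown, plain arithmetic on the logged dict:

    # Effective capacity implied by the inventory reported above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0; single allocations are
    # further capped by max_unit (16 VCPU, 65530 MB, 155 GB in the log).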
[ 972.302835] env[68437]: DEBUG nova.compute.manager [req-8a03f1cf-8e20-4f6e-b513-ef44ea9bc0cb req-3cb970e8-6b02-4756-ac1c-519066208c1a service nova] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Received event network-vif-deleted-b8341297-717e-4e99-89cc-784162d9ffb3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 972.549720] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.550011] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.550249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "1186da93-57aa-40f4-8aae-702d039844d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.550441] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.550616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.552479] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1e30a1d3-a88c-4362-8591-6b4e151e83e3 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.926s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.553018] env[68437]: INFO nova.compute.manager [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Terminating instance [ 972.573598] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 
tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.576891] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.215s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.578414] env[68437]: INFO nova.compute.claims [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.599189] env[68437]: INFO nova.scheduler.client.report [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Deleted allocations for instance cf394b0b-cb14-4ae1-81bb-622c951bfdab [ 972.800260] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.060940] env[68437]: DEBUG nova.compute.manager [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.061343] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.062361] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbc1d78-318c-4b98-b241-cc12113ae257 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.071812] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.072071] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09f1cbb3-e2d0-4213-8cec-2e2c9bc81a84 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.078530] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 973.078530] env[68437]: value = "task-2944472" [ 973.078530] env[68437]: _type = "Task" [ 973.078530] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.089912] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944472, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.110274] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dca3429f-4220-47d4-ab20-3a7f345230cf tempest-VolumesAdminNegativeTest-411280535 tempest-VolumesAdminNegativeTest-411280535-project-member] Lock "cf394b0b-cb14-4ae1-81bb-622c951bfdab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.548s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.301031] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.302033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da114987-1fd4-40f9-871b-09782173059d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.311696] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 973.311696] env[68437]: value = "task-2944473" [ 973.311696] env[68437]: _type = "Task" [ 973.311696] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.320821] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.590194] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944472, 'name': PowerOffVM_Task, 'duration_secs': 0.290212} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.593358] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.593604] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.594122] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-447fc1bc-9bc3-4014-82b2-ab5daab9795a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.787206] env[68437]: DEBUG nova.compute.manager [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.788222] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d69e2a-b771-4d1e-ade6-4ee6d821a3e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.820931] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944473, 'name': PowerOffVM_Task, 'duration_secs': 0.18658} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.823914] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.824811] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.826415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf8cf09-30f6-4887-948c-25557be06fa3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.833670] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.834432] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.834432] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleting the datastore file [datastore2] 1186da93-57aa-40f4-8aae-702d039844d4 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.836780] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a44c2e1a-53cd-411d-91f4-1324e68363ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.838751] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.841907] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdf0f1e4-159c-4520-817a-cda42e528711 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.847936] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for the task: (returnval){ [ 973.847936] env[68437]: value = "task-2944475" [ 973.847936] env[68437]: _type = "Task" [ 973.847936] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.858030] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944475, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.867613] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.867957] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.868215] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Deleting the datastore file [datastore1] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.868516] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15a377a9-2c6a-40c7-9973-6aa99fa5e3f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.876711] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 973.876711] env[68437]: value = "task-2944477" [ 973.876711] env[68437]: _type = "Task" [ 973.876711] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.887066] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.948479] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e633578-d780-4897-9a83-fded1158af07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.953380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014968ce-12fd-42fe-96d2-d3f8073e4e51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.988704] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503bb7c0-daf8-4cc9-af48-bfe4d7608d23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.001457] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626715df-8418-4aeb-971c-ecf678e0851d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.015734] env[68437]: DEBUG nova.compute.provider_tree [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.302456] env[68437]: INFO nova.compute.manager [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] instance snapshotting [ 974.305422] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388af3bc-b63e-42f0-9770-8f3696d3f100 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.324821] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24da5e87-21a8-4894-b195-057fd3545389 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.357538] env[68437]: DEBUG oslo_vmware.api [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Task: {'id': task-2944475, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241434} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.360482] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.360482] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.360482] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.360482] env[68437]: INFO nova.compute.manager [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Took 1.30 seconds to destroy the instance on the hypervisor. [ 974.360482] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.360482] env[68437]: DEBUG nova.compute.manager [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.360482] env[68437]: DEBUG nova.network.neutron [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 974.387068] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179549} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.387359] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.387542] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.387799] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.518976] env[68437]: DEBUG nova.scheduler.client.report [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.680596] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 974.681625] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c1bf1b-838f-48af-b61c-6038f729b68a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.693210] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 974.693210] env[68437]: ERROR oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk due to incomplete transfer. 
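The ERROR above is the cleanup path of the VMDK read handle used for the stream-optimized image upload: on close it checks the NFC lease state and, because fewer bytes than the lease advertised were read, it aborts the lease instead of completing it; the Glance upload itself still succeeded, as the following entries show. A rough sketch of that close-time decision, offered as an illustration of the pattern rather than oslo.vmware's actual code; the byte counters are assumed inputs:

    # Rough sketch of the close-time lease handling reflected in the
    # "Aborting lease ... due to incomplete transfer" entry above. This
    # illustrates the pattern only; it is not oslo.vmware's actual code.
    from oslo_vmware import vim_util

    def release_vmdk_lease(session, lease, bytes_read, expected_size):
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, lease, 'state')
        if state != 'ready':
            return  # lease already gone or errored; nothing to release
        if bytes_read == expected_size:
            # Everything the lease advertised was transferred.
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # Short read (the case hit here): abort so vSphere drops the lease.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)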
[ 974.693870] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a221c529-d60e-4627-8a50-165a7ea0f22b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.702590] env[68437]: DEBUG oslo_vmware.rw_handles [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520a6f39-4ebe-b6d3-8e35-dac56b1cee2b/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 974.702718] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Uploaded image 2f46a171-976e-486e-806f-88cae57dee64 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 974.705147] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 974.706135] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d7f7fc4a-55d3-4d2f-8df9-3b51759cc833 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.712784] env[68437]: DEBUG nova.compute.manager [req-dade07f8-f3cc-41e9-81d4-e4944bb908fe req-5ffce319-87aa-490b-906b-145e5b590b0a service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Received event network-vif-deleted-4fd952c0-7921-4632-b5de-2fe90c4bba05 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 974.712784] env[68437]: INFO nova.compute.manager [req-dade07f8-f3cc-41e9-81d4-e4944bb908fe req-5ffce319-87aa-490b-906b-145e5b590b0a service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Neutron deleted interface 4fd952c0-7921-4632-b5de-2fe90c4bba05; detaching it from the instance and deleting it from the info cache [ 974.712784] env[68437]: DEBUG nova.network.neutron [req-dade07f8-f3cc-41e9-81d4-e4944bb908fe req-5ffce319-87aa-490b-906b-145e5b590b0a service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.714923] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 974.714923] env[68437]: value = "task-2944478" [ 974.714923] env[68437]: _type = "Task" [ 974.714923] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.725125] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944478, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.836351] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 974.836673] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9c4d5a00-6513-429b-b2f2-7becc88f62a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.843682] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 974.843682] env[68437]: value = "task-2944479" [ 974.843682] env[68437]: _type = "Task" [ 974.843682] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.852267] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944479, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.026305] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.450s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.026885] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 975.030349] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.981s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.032041] env[68437]: INFO nova.compute.claims [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.190576] env[68437]: DEBUG nova.network.neutron [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.216648] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f40d88b-df41-4012-b30b-af1005e0cb84 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.228446] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944478, 'name': Destroy_Task, 'duration_secs': 0.342715} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.229721] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Destroyed the VM [ 975.230026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 975.230309] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4e4cb4b9-bcfc-4001-979a-52ddbdaaf878 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.234717] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc64b071-b194-4b30-aa35-66e1956857a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.252877] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 975.252877] env[68437]: value = "task-2944480" [ 975.252877] env[68437]: _type = "Task" [ 975.252877] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.269049] env[68437]: DEBUG nova.compute.manager [req-dade07f8-f3cc-41e9-81d4-e4944bb908fe req-5ffce319-87aa-490b-906b-145e5b590b0a service nova] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Detach interface failed, port_id=4fd952c0-7921-4632-b5de-2fe90c4bba05, reason: Instance 1186da93-57aa-40f4-8aae-702d039844d4 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 975.272536] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944480, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.356418] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944479, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.423648] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 975.423923] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 975.424112] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 975.424303] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 975.424448] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 975.424592] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 975.424803] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 975.424957] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 975.425136] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 975.425309] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 975.425472] env[68437]: DEBUG nova.virt.hardware [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 975.426474] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b17316-13fe-435b-a9cd-0764d4316da4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.434301] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3580e674-e49e-4f0f-be66-6e0e667b4ce9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.450754] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 975.456668] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.458089] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 975.458089] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a22ed965-758d-4a50-838b-ad8c1b04ca5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.476051] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 975.476051] env[68437]: value = "task-2944481" [ 975.476051] env[68437]: _type = "Task" [ 975.476051] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.488456] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944481, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.536542] env[68437]: DEBUG nova.compute.utils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 975.538479] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.538690] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 975.646650] env[68437]: DEBUG nova.policy [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd1ba077acdb424681ab37b017eaff0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '376f884a99d6438aa53e3df5b9c34450', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 975.694600] env[68437]: INFO nova.compute.manager [-] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Took 1.34 seconds to deallocate network for instance. [ 975.764291] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944480, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.855169] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944479, 'name': CreateSnapshot_Task, 'duration_secs': 0.90591} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.855739] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 975.856295] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd6cf26-0614-4fb3-b907-7d4c5877043d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.986955] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944481, 'name': CreateVM_Task, 'duration_secs': 0.431052} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.987162] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.987589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.987755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.988092] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 975.988350] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-943b1efb-d1f8-4820-baec-0915d22c4f17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.992682] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 975.992682] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521b421f-3332-c93a-f88d-453f74a25bbd" [ 975.992682] env[68437]: _type = "Task" [ 975.992682] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.999895] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b421f-3332-c93a-f88d-453f74a25bbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.044417] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 976.201306] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.206612] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Successfully created port: 15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.265889] env[68437]: DEBUG oslo_vmware.api [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944480, 'name': RemoveSnapshot_Task, 'duration_secs': 0.594297} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.268417] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 976.268658] env[68437]: INFO nova.compute.manager [None req-8d2d2c31-78e8-40f0-b4d9-b93b02a2c77b tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 16.96 seconds to snapshot the instance on the hypervisor. 
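The task entries around this point (RemoveSnapshot_Task, CreateSnapshot_Task, CloneVM_Task, SearchDatastore_Task) all follow the same polling idiom: a vCenter task is kicked off, oslo_vmware.api logs "Waiting for the task", and _poll_task reports "progress is N%" until the task is "completed successfully". The sketch below illustrates that idiom using oslo.service's FixedIntervalLoopingCall; it is only an illustration, not the driver's actual code path (Nova goes through oslo.vmware's VMwareAPISession.wait_for_task), and fetch_task_progress is a hypothetical stand-in for the real vSphere property query.

    from oslo_service import loopingcall


    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state."""


    def wait_for_vcenter_task(fetch_task_progress, task_id, poll_interval=0.5):
        # fetch_task_progress(task_id) is assumed (hypothetically) to return a
        # (state, progress) pair, e.g. ('running', 42), ('success', 100),
        # ('error', 0).
        def _poll():
            state, progress = fetch_task_progress(task_id)
            if state == 'running':
                # Corresponds to the "Task: {...} progress is N%." lines above.
                print("Task %s progress is %d%%." % (task_id, progress))
            elif state == 'success':
                # Stops the looping call; .wait() below returns retvalue.
                raise loopingcall.LoopingCallDone(retvalue=progress)
            else:
                # Any other exception stops the loop and is re-raised by .wait().
                raise TaskFailed("Task %s ended in state %s" % (task_id, state))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()

The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries in this log come from the same FixedIntervalLoopingCall machinery, applied by the vmwareapi driver around its VM-creation call.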
[ 976.359459] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f252f7c-645a-48f6-9336-ec6658fb3b6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.375467] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 976.376069] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bb3bf590-6c34-470c-a8cc-7d71f6b7bf86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.379450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c881db7e-062e-4b33-864b-b5eeb6e9a6a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.411378] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f62ef56-b0b0-4ed5-a434-97766b968327 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.414851] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 976.414851] env[68437]: value = "task-2944482" [ 976.414851] env[68437]: _type = "Task" [ 976.414851] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.422410] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cf9d54-0dc8-495c-9629-ef3466bdb8e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.429737] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944482, 'name': CloneVM_Task} progress is 12%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.440304] env[68437]: DEBUG nova.compute.provider_tree [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.504624] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b421f-3332-c93a-f88d-453f74a25bbd, 'name': SearchDatastore_Task, 'duration_secs': 0.049707} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.504994] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.505288] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.505578] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.505732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.505916] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.506200] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-214fdb14-9d26-403d-9134-a0d9931b8045 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.516104] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.516397] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 976.517152] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e11e00d0-3920-46bd-b58a-9eec34fdf7f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.522672] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 976.522672] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522283cc-8a24-43a5-05b4-6ef533876404" [ 976.522672] env[68437]: _type = "Task" [ 976.522672] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.530340] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522283cc-8a24-43a5-05b4-6ef533876404, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.779163] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Successfully created port: 27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.932087] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944482, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.943308] env[68437]: DEBUG nova.scheduler.client.report [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.036090] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522283cc-8a24-43a5-05b4-6ef533876404, 'name': SearchDatastore_Task, 'duration_secs': 0.01196} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.036090] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a9a68ce-a53d-4cce-b664-e0d8efd914a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.040230] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 977.040230] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526dbfec-f6d2-6d25-aace-02df0c55aba6" [ 977.040230] env[68437]: _type = "Task" [ 977.040230] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.047842] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526dbfec-f6d2-6d25-aace-02df0c55aba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.057414] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 977.086089] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.086359] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.086519] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.086715] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 
tempest-ServersTestMultiNic-268970127-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.086856] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.087010] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.088873] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.089122] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.089340] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.089625] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.089906] env[68437]: DEBUG nova.virt.hardware [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.090871] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ecd937-fbca-4592-8e56-d71bbf672881 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.102018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dba0b2-6fe2-4aba-ad72-a1f478dc2f45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.428037] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944482, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.453288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.453856] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 977.456746] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.679s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.460032] env[68437]: INFO nova.compute.claims [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.552949] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526dbfec-f6d2-6d25-aace-02df0c55aba6, 'name': SearchDatastore_Task, 'duration_secs': 0.017299} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.552949] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.553657] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 977.553657] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddfabc06-2002-44db-a5b7-ad053d074f39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.560433] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 977.560433] env[68437]: value = "task-2944483" [ 977.560433] env[68437]: _type = "Task" [ 977.560433] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.569336] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.932348] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944482, 'name': CloneVM_Task} progress is 95%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.966954] env[68437]: DEBUG nova.compute.utils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 977.968713] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 978.071627] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445089} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.072665] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 978.072665] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 978.072665] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c24fc4db-172d-4974-b581-9f404ee57ad0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.080064] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 978.080064] env[68437]: value = "task-2944484" [ 978.080064] env[68437]: _type = "Task" [ 978.080064] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.088349] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944484, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.310202] env[68437]: DEBUG nova.compute.manager [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-vif-plugged-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 978.310430] env[68437]: DEBUG oslo_concurrency.lockutils [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] Acquiring lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.310628] env[68437]: DEBUG oslo_concurrency.lockutils [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.310793] env[68437]: DEBUG oslo_concurrency.lockutils [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.310964] env[68437]: DEBUG nova.compute.manager [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] No waiting events found dispatching network-vif-plugged-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 978.311167] env[68437]: WARNING nova.compute.manager [req-a286065c-3f58-4cf9-b4a2-ea2f8ccf2043 req-46bdcb53-ac41-4eaa-86aa-c09844ac8cf8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received unexpected event network-vif-plugged-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 for instance with vm_state building and task_state spawning. [ 978.382896] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Successfully updated port: 15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 978.428308] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944482, 'name': CloneVM_Task, 'duration_secs': 1.967277} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.428895] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Created linked-clone VM from snapshot [ 978.429706] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adc578f-dcbe-49ba-94c0-dcb0e9075e44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.438047] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Uploading image 7ae3c924-c7df-4c0e-b7ec-616d7490ffd0 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 978.463357] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 978.463357] env[68437]: value = "vm-591043" [ 978.463357] env[68437]: _type = "VirtualMachine" [ 978.463357] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 978.464266] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1def61aa-f6a9-489f-9f7f-1489dab2dbd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.473903] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 978.476924] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lease: (returnval){ [ 978.476924] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520449b8-5066-e11a-b7e4-b14077b8abca" [ 978.476924] env[68437]: _type = "HttpNfcLease" [ 978.476924] env[68437]: } obtained for exporting VM: (result){ [ 978.476924] env[68437]: value = "vm-591043" [ 978.476924] env[68437]: _type = "VirtualMachine" [ 978.476924] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 978.477208] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the lease: (returnval){ [ 978.477208] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520449b8-5066-e11a-b7e4-b14077b8abca" [ 978.477208] env[68437]: _type = "HttpNfcLease" [ 978.477208] env[68437]: } to be ready. 
{{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 978.484183] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.484183] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520449b8-5066-e11a-b7e4-b14077b8abca" [ 978.484183] env[68437]: _type = "HttpNfcLease" [ 978.484183] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.589016] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062824} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.589361] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.590212] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b415ff64-1fca-4e25-97d8-8315f196ff80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.612825] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.615609] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8165a3cf-2965-44f7-b1ac-68a92de55da8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.634872] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 978.634872] env[68437]: value = "task-2944486" [ 978.634872] env[68437]: _type = "Task" [ 978.634872] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.642651] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944486, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.826349] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5072a3d-0c37-4211-88d7-372909131d20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.834729] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95d6228-d92e-4ed5-84db-bf6b46325b0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.867060] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda25d41-1760-4771-94e5-81a92391e1c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.874446] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbe1f85-d964-49fb-b55b-b6340390d775 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.889862] env[68437]: DEBUG nova.compute.provider_tree [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.990281] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.990281] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520449b8-5066-e11a-b7e4-b14077b8abca" [ 978.990281] env[68437]: _type = "HttpNfcLease" [ 978.990281] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 978.990716] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 978.990716] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520449b8-5066-e11a-b7e4-b14077b8abca" [ 978.990716] env[68437]: _type = "HttpNfcLease" [ 978.990716] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 978.991301] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd881364-626f-4e94-92f2-423eb2f23cc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.000364] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk from lease info. 
{{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 979.000663] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 979.147778] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944486, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.151207] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-82dabf02-1605-4ae4-a049-561b1f2b1c3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.258249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "4254002c-d292-4f10-a3d0-387853dbbcb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.258538] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.258762] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "4254002c-d292-4f10-a3d0-387853dbbcb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.258946] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.259138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.262027] env[68437]: INFO 
nova.compute.manager [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Terminating instance [ 979.393222] env[68437]: DEBUG nova.scheduler.client.report [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.488655] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.519592] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.519871] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.520097] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.520309] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.520675] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Image pref 0:0:0 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.521079] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.521357] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.521586] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.521814] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.522028] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.522580] env[68437]: DEBUG nova.virt.hardware [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.523904] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc58d700-d67c-4802-af9e-42d86648a91d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.532787] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c0ee64-fc14-42ee-afad-b7b8252aa098 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.555076] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.565462] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Creating folder: Project (201928ac5f124c2d892f57992d674851). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.566095] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b0417d9-e965-489b-b2fa-610a6f3c5732 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.578038] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Created folder: Project (201928ac5f124c2d892f57992d674851) in parent group-v590848. [ 979.578144] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Creating folder: Instances. Parent ref: group-v591044. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.578414] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f82ba59-9f79-4f03-86e1-e9a62e782353 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.589754] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Created folder: Instances in parent group-v591044. [ 979.589999] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.590404] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.590404] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ab40fd9-33cf-4a4d-a338-b6e64c92840d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.612227] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.612227] env[68437]: value = "task-2944489" [ 979.612227] env[68437]: _type = "Task" [ 979.612227] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.621647] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944489, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.648331] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944486, 'name': ReconfigVM_Task, 'duration_secs': 0.881554} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.648331] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5/9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.648908] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-826272b3-e91d-4c97-a2d6-c666fcd30811 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.655290] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 979.655290] env[68437]: value = "task-2944490" [ 979.655290] env[68437]: _type = "Task" [ 979.655290] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.663668] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944490, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.767978] env[68437]: DEBUG nova.compute.manager [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.768318] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.769360] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca58a1c-fadf-4288-bebb-eae35f02155a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.779017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.779702] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-889d8703-8bfe-437f-b58a-df7a540bb687 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.787714] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 979.787714] env[68437]: value = "task-2944491" [ 979.787714] env[68437]: _type = "Task" [ 979.787714] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.799502] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944491, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.899539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.442s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.900578] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 979.907076] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.200s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.907957] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.912168] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.791s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.912282] env[68437]: DEBUG nova.objects.instance [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lazy-loading 'resources' on Instance uuid 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.946538] env[68437]: INFO nova.scheduler.client.report [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocations for instance 6d877579-3095-4ee9-bb3e-4d5a9122f1ed [ 980.123734] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944489, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.165449] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944490, 'name': Rename_Task, 'duration_secs': 0.186076} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.165774] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 980.166044] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-900786b1-b9c9-4e24-a149-6e8e32febd9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.172735] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 980.172735] env[68437]: value = "task-2944492" [ 980.172735] env[68437]: _type = "Task" [ 980.172735] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.183833] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.298417] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944491, 'name': PowerOffVM_Task, 'duration_secs': 0.229102} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.299949] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.299949] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 980.299949] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0eb47d2-b9b2-4293-a7b2-97f5318a1a61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.362846] env[68437]: DEBUG nova.compute.manager [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-changed-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 980.362937] env[68437]: DEBUG nova.compute.manager [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Refreshing instance network info cache due to event network-changed-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 980.364358] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] Acquiring lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.364358] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] Acquired lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.364358] env[68437]: DEBUG nova.network.neutron [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Refreshing network info cache for port 15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 980.408174] env[68437]: DEBUG nova.compute.utils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 980.409799] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 980.410017] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 980.460807] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e7e29cb-47b7-4b83-9e33-53bd693a2648 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "6d877579-3095-4ee9-bb3e-4d5a9122f1ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.232s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.464532] env[68437]: DEBUG nova.policy [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0e66fd345044e92857d742c65f537ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36ec823128647758ca8047a5ebf1ae1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 980.510317] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 980.510593] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 980.510782] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleting the datastore file [datastore1] 4254002c-d292-4f10-a3d0-387853dbbcb3 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 980.515127] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ceb527ac-7c9c-4f0d-bad5-0acf8c3725a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.524121] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 980.524121] env[68437]: value = "task-2944494" [ 980.524121] env[68437]: _type = "Task" [ 980.524121] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.537737] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.565441] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Successfully updated port: 27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.629925] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944489, 'name': CreateVM_Task, 'duration_secs': 0.617317} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.630119] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.630589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.630755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.631347] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 980.631519] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddaffd14-8602-4249-b500-647747c026e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.639227] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 980.639227] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d59dbb-be83-2914-1abd-7db9d489c6a1" [ 980.639227] env[68437]: _type = "Task" [ 980.639227] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.650958] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d59dbb-be83-2914-1abd-7db9d489c6a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.689128] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944492, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.843179] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63b93c4-7078-4e48-aebf-0ad4aaf02cca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.851468] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cc866e-5b2d-4a62-81e3-0a63cd598d1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.887178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2e8a09-4b2a-4724-a192-45696d899fb4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.896618] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ce9b82-b4c0-4585-a033-8565c561295b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.903543] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Successfully created port: b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.915844] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 980.918945] env[68437]: DEBUG nova.compute.provider_tree [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.927933] env[68437]: DEBUG nova.network.neutron [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 981.032150] env[68437]: DEBUG nova.network.neutron [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.041018] env[68437]: DEBUG oslo_vmware.api [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.438518} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.041018] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 981.041018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 981.041381] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 981.041732] env[68437]: INFO nova.compute.manager [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Took 1.27 seconds to destroy the instance on the hypervisor. [ 981.042014] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 981.042915] env[68437]: DEBUG nova.compute.manager [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 981.043014] env[68437]: DEBUG nova.network.neutron [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 981.070320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.152262] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d59dbb-be83-2914-1abd-7db9d489c6a1, 'name': SearchDatastore_Task, 'duration_secs': 0.030074} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.152625] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.152864] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 981.153043] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.153209] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.153442] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.153699] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5927da6-e710-4bce-8993-75cbdbe6e5b2 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.163500] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.163649] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 981.164341] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78fc82b7-64f9-4994-ad88-4e11ae068467 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.170790] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 981.170790] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520fff5d-f2f5-7152-55d3-87b9e38ad576" [ 981.170790] env[68437]: _type = "Task" [ 981.170790] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.183023] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520fff5d-f2f5-7152-55d3-87b9e38ad576, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.191097] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944492, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.431043] env[68437]: DEBUG nova.scheduler.client.report [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.534777] env[68437]: DEBUG oslo_concurrency.lockutils [req-1ec38199-5669-4efc-ac60-ea6ed08a575c req-59a24ee5-5318-4e58-9ee4-1254f0552b65 service nova] Releasing lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.535278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.535522] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 981.682451] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520fff5d-f2f5-7152-55d3-87b9e38ad576, 'name': SearchDatastore_Task, 'duration_secs': 0.016385} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.686782] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd99cf0-6cc1-4080-8e35-e48a936be8a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.694120] env[68437]: DEBUG oslo_vmware.api [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944492, 'name': PowerOnVM_Task, 'duration_secs': 1.261808} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.695517] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.695751] env[68437]: DEBUG nova.compute.manager [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.696100] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 981.696100] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52348512-db15-df8a-a90f-7805685f72e4" [ 981.696100] env[68437]: _type = "Task" [ 981.696100] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.696837] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8833f2f2-5cda-4391-ae1f-987b273e5d48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.711709] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52348512-db15-df8a-a90f-7805685f72e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.928102] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 981.941277] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.943812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.395s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.944050] env[68437]: DEBUG nova.objects.instance [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'resources' on Instance uuid c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.967705] env[68437]: DEBUG nova.network.neutron [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.983813] env[68437]: INFO nova.scheduler.client.report [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Deleted allocations for instance 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c [ 982.080357] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 982.080653] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.080991] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 982.080991] env[68437]: DEBUG nova.virt.hardware [None 
req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.081420] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 982.081592] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 982.081941] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 982.082426] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 982.082426] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 982.082556] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 982.082734] env[68437]: DEBUG nova.virt.hardware [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 982.087569] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb55cac-7d53-4452-8aa2-2816bb395e0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.088099] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 982.096415] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22954279-00e2-4939-9569-9cf3db2a66d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.217108] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52348512-db15-df8a-a90f-7805685f72e4, 'name': SearchDatastore_Task, 'duration_secs': 0.017916} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.219468] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.220053] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 982.221042] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.221532] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0fd8a81-0e2e-4a93-a3e4-3d031c3e6ec8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.231112] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 982.231112] env[68437]: value = "task-2944495" [ 982.231112] env[68437]: _type = "Task" [ 982.231112] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.241350] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.400936] env[68437]: DEBUG nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-vif-plugged-27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 982.401495] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Acquiring lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.401764] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.401975] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.402194] env[68437]: DEBUG nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] No waiting events found dispatching network-vif-plugged-27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 982.402461] env[68437]: WARNING nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received unexpected event network-vif-plugged-27913afc-65e0-4710-a03a-e1f99ee3ff22 for instance with vm_state building and task_state spawning. [ 982.402655] env[68437]: DEBUG nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-changed-27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 982.403309] env[68437]: DEBUG nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Refreshing instance network info cache due to event network-changed-27913afc-65e0-4710-a03a-e1f99ee3ff22. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 982.403412] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Acquiring lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.471603] env[68437]: INFO nova.compute.manager [-] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Took 1.43 seconds to deallocate network for instance. [ 982.494668] env[68437]: DEBUG oslo_concurrency.lockutils [None req-41c1dbee-3da5-4f5d-b7b4-10694c307b6c tempest-ServerMetadataTestJSON-1219444224 tempest-ServerMetadataTestJSON-1219444224-project-member] Lock "2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.506s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.555020] env[68437]: DEBUG nova.compute.manager [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Received event network-vif-plugged-b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 982.555020] env[68437]: DEBUG oslo_concurrency.lockutils [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.555020] env[68437]: DEBUG oslo_concurrency.lockutils [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.555020] env[68437]: DEBUG oslo_concurrency.lockutils [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.555219] env[68437]: DEBUG nova.compute.manager [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] No waiting events found dispatching network-vif-plugged-b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 982.555279] env[68437]: WARNING nova.compute.manager [req-a030b3e1-0db0-4e07-8f18-f0262f58cdac req-688afa0b-cdc2-4d75-81a1-bdcccc00cfb8 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Received unexpected event network-vif-plugged-b9c19590-2f8d-4149-989f-8d0fd1e5fe29 for instance with vm_state building and task_state spawning. 
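The entries above and below repeat one oslo.vmware pattern: a vSphere *_Task method (CreateVM_Task, PowerOffVM_Task, Rename_Task, CopyVirtualDisk_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, ExtendVirtualDisk_Task) is invoked through oslo_vmware.service, and oslo_vmware.api then polls the returned task, which is what produces the recurring "Waiting for the task", "progress is N%" and "completed successfully ... duration_secs" lines. As a rough illustration only, a minimal Python sketch of that invoke-and-wait flow follows; the vCenter host, credentials, and constructor keyword names are placeholders/assumptions for the example, not values taken from this log.

from oslo_vmware import api as vmware_api


def power_off_vm(session, vm_ref):
    """Invoke PowerOffVM_Task on a VM managed-object ref and block until done.

    session.wait_for_task() is the polling loop that emits the recurring
    "Task: {...} progress is N%" and "completed successfully" entries in this
    log; a task that ends in the error state surfaces as an oslo_vmware
    exception instead of a return value.
    """
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task_ref)


# Hypothetical usage (constructing the session performs a real vCenter login,
# and the keyword names below are assumptions that may differ by release, so
# it is left commented out):
# session = vmware_api.VMwareAPISession(
#     'vcenter.example.org', 'administrator@vsphere.local', 'secret',
#     api_retry_count=10, task_poll_interval=0.5)
# power_off_vm(session, some_vm_ref)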
[ 982.672514] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Successfully updated port: b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 982.743257] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944495, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.801034] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5067007d-6e54-4f89-92a3-7e582ffac9cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.808931] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23e9d41-fc8d-4884-90e6-8d471e7e2984 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.846221] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0403bf-c4c2-42e1-b3f4-78e2663d4847 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.855282] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7982c94b-ed9e-465e-922a-1c6c6f817263 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.870466] env[68437]: DEBUG nova.compute.provider_tree [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.892248] env[68437]: DEBUG nova.network.neutron [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updating instance_info_cache with network_info: [{"id": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "address": "fa:16:3e:17:f2:44", "network": {"id": "52beae94-217a-44b1-a55c-83213e20b70e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-451523707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15bbe98e-d7", "ovs_interfaceid": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "address": "fa:16:3e:76:bd:87", "network": {"id": "b3af08e7-2e52-4ea7-8f47-d3269140178e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-695282770", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27913afc-65", "ovs_interfaceid": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.986591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.175866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.175996] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.176901] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 983.243300] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944495, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631388} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.243300] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 983.243709] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 983.243872] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c01dcec1-c512-4b77-aca1-d138209e5e1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.252793] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 983.252793] env[68437]: value = "task-2944496" [ 983.252793] env[68437]: _type = "Task" [ 983.252793] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.264380] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944496, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.374033] env[68437]: DEBUG nova.scheduler.client.report [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.394861] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.395420] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance network_info: |[{"id": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "address": "fa:16:3e:17:f2:44", "network": {"id": "52beae94-217a-44b1-a55c-83213e20b70e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-451523707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15bbe98e-d7", "ovs_interfaceid": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "address": "fa:16:3e:76:bd:87", "network": {"id": "b3af08e7-2e52-4ea7-8f47-d3269140178e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-695282770", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap27913afc-65", "ovs_interfaceid": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 983.397143] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Acquired lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.397143] env[68437]: DEBUG nova.network.neutron [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Refreshing network info cache for port 27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 983.397309] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:f2:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4a9e02-45f1-4afb-8abb-0de26b153086', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15bbe98e-d7a1-4f4f-929a-e9c9e1b91362', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:bd:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27913afc-65e0-4710-a03a-e1f99ee3ff22', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.407821] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 983.409285] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 983.410028] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba7ed284-b78b-4868-8930-b776ed9bc1f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.436247] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.436247] env[68437]: value = "task-2944497" [ 983.436247] env[68437]: _type = "Task" [ 983.436247] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.446488] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944497, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.598284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.598608] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.598852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.599063] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.599258] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.601603] env[68437]: INFO nova.compute.manager [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Terminating instance [ 983.713053] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 983.764426] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944496, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087936} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.764888] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 983.767211] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee5c9af-aac8-4380-85cd-c4285ba05bd1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.794017] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 983.797585] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de9982d5-48ae-4660-8488-058420198574 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.820388] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 983.820388] env[68437]: value = "task-2944498" [ 983.820388] env[68437]: _type = "Task" [ 983.820388] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.830130] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944498, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.882589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.885557] env[68437]: DEBUG nova.network.neutron [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.889440] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.089s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.889440] env[68437]: DEBUG nova.objects.instance [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lazy-loading 'resources' on Instance uuid d5db3112-88c7-43af-a434-b91ca69f8559 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.915564] env[68437]: INFO nova.scheduler.client.report [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted allocations for instance c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd [ 983.950232] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944497, 'name': CreateVM_Task, 'duration_secs': 0.476498} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.953617] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.954559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.954755] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.955234] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 983.955840] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-087789b5-8089-40ed-9b68-bf128edef815 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.962777] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 983.962777] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a27cd6-0e03-a39e-3e9e-8f02b4ce175c" [ 983.962777] env[68437]: _type = "Task" [ 983.962777] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.976921] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a27cd6-0e03-a39e-3e9e-8f02b4ce175c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.105469] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "refresh_cache-9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.107220] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquired lock "refresh_cache-9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.107220] env[68437]: DEBUG nova.network.neutron [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 984.286380] env[68437]: DEBUG nova.network.neutron [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updated VIF entry in instance network info cache for port 27913afc-65e0-4710-a03a-e1f99ee3ff22. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 984.291291] env[68437]: DEBUG nova.network.neutron [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updating instance_info_cache with network_info: [{"id": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "address": "fa:16:3e:17:f2:44", "network": {"id": "52beae94-217a-44b1-a55c-83213e20b70e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-451523707", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4a9e02-45f1-4afb-8abb-0de26b153086", "external-id": "nsx-vlan-transportzone-336", "segmentation_id": 336, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15bbe98e-d7", "ovs_interfaceid": "15bbe98e-d7a1-4f4f-929a-e9c9e1b91362", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "address": "fa:16:3e:76:bd:87", "network": {"id": "b3af08e7-2e52-4ea7-8f47-d3269140178e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-695282770", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27913afc-65", "ovs_interfaceid": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.331972] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.391125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.391621] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Instance network_info: |[{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 984.398127] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:da:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'b9c19590-2f8d-4149-989f-8d0fd1e5fe29', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.414367] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 984.415285] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.415285] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-939a7be5-25a2-4e05-9f5c-70802fde2617 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.437697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e8e3b24-4111-4dcc-aeb5-8146a67932c4 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.403s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.443962] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.443962] env[68437]: value = "task-2944499" [ 984.443962] env[68437]: _type = "Task" [ 984.443962] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.457033] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944499, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.480934] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a27cd6-0e03-a39e-3e9e-8f02b4ce175c, 'name': SearchDatastore_Task, 'duration_secs': 0.017631} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.485008] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.485303] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 984.485594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.485719] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.485910] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 984.487736] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cba8eaf9-abbd-4b49-9274-1ed1b18a9b02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.502910] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 984.503536] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 984.504840] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb0a40b4-af54-43d2-852c-f9d8bcba3c46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.514238] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 984.514238] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5248b37d-d30c-5b80-e973-70a5481bfd42" [ 984.514238] env[68437]: _type = "Task" [ 984.514238] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.528262] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5248b37d-d30c-5b80-e973-70a5481bfd42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.627391] env[68437]: DEBUG nova.compute.manager [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Received event network-changed-b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 984.627610] env[68437]: DEBUG nova.compute.manager [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Refreshing instance network info cache due to event network-changed-b9c19590-2f8d-4149-989f-8d0fd1e5fe29. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 984.629098] env[68437]: DEBUG oslo_concurrency.lockutils [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] Acquiring lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.629098] env[68437]: DEBUG oslo_concurrency.lockutils [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] Acquired lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.629098] env[68437]: DEBUG nova.network.neutron [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Refreshing network info cache for port b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 984.642419] env[68437]: DEBUG nova.network.neutron [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 984.709764] env[68437]: DEBUG nova.network.neutron [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.793519] env[68437]: DEBUG oslo_concurrency.lockutils [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] Releasing lock "refresh_cache-ada623a8-b0ce-4709-b2af-ad80b464af4e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.793962] env[68437]: DEBUG nova.compute.manager [req-d4541c71-93ba-49a9-86d4-ffb8a9615dc3 req-9773630e-e21d-406e-add5-f0c5909863a8 service nova] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Received event network-vif-deleted-5afaab2f-9ec7-4b17-963e-30e20520a140 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 984.802113] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0afce32-87b7-455d-abcf-dcb09a2eb9b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.815196] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a885a1-b618-44d4-b891-396ccd47f7cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.860231] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6d3b35-8455-45ad-94b7-747225c4d5cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.867277] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944498, 'name': ReconfigVM_Task, 'duration_secs': 0.635117} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.867994] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 984.868657] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2894e9fa-e0c7-41b7-bf06-b49833a62069 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.873955] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b411191e-f110-4f91-b2e0-ec95c961c760 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.880697] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 984.880697] env[68437]: value = "task-2944500" [ 984.880697] env[68437]: _type = "Task" [ 984.880697] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.892863] env[68437]: DEBUG nova.compute.provider_tree [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.913096] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944500, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.955747] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944499, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.025645] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5248b37d-d30c-5b80-e973-70a5481bfd42, 'name': SearchDatastore_Task, 'duration_secs': 0.017849} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.026549] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b497a5e-72bb-4eae-88e6-4d3387774b45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.035156] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 985.035156] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526cbaf1-962e-9817-f007-86acf468380f" [ 985.035156] env[68437]: _type = "Task" [ 985.035156] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.046786] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526cbaf1-962e-9817-f007-86acf468380f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.147663] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.148058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.148181] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.148356] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.148564] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
985.150815] env[68437]: INFO nova.compute.manager [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Terminating instance [ 985.213416] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Releasing lock "refresh_cache-9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.213871] env[68437]: DEBUG nova.compute.manager [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 985.216018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.216018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ad0d55-f611-4ff1-8279-48a97c50a0d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.223957] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.224241] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01317da7-1256-443b-86b0-e038a51f1625 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.232253] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 985.232253] env[68437]: value = "task-2944501" [ 985.232253] env[68437]: _type = "Task" [ 985.232253] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.242031] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944501, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.396479] env[68437]: DEBUG nova.scheduler.client.report [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.401287] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944500, 'name': Rename_Task, 'duration_secs': 0.270339} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.402993] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 985.402993] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-969c2b4f-ff15-447e-8c27-d6e0ac0d724e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.417615] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 985.417615] env[68437]: value = "task-2944502" [ 985.417615] env[68437]: _type = "Task" [ 985.417615] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.426264] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.456031] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944499, 'name': CreateVM_Task, 'duration_secs': 0.53479} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.456031] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 985.457091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.457653] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.458088] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 985.459017] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d00e81d-600f-44ae-9e2f-271b08b2144d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.467240] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 985.467240] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528b8df5-9b68-576b-5e95-2f802536823e" [ 985.467240] env[68437]: _type = "Task" [ 985.467240] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.477291] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528b8df5-9b68-576b-5e95-2f802536823e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.502354] env[68437]: DEBUG nova.network.neutron [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updated VIF entry in instance network info cache for port b9c19590-2f8d-4149-989f-8d0fd1e5fe29. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 985.502408] env[68437]: DEBUG nova.network.neutron [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.547725] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526cbaf1-962e-9817-f007-86acf468380f, 'name': SearchDatastore_Task, 'duration_secs': 0.012374} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.548205] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.548629] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ada623a8-b0ce-4709-b2af-ad80b464af4e/ada623a8-b0ce-4709-b2af-ad80b464af4e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 985.549079] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e601e83-829f-4d8a-addc-f22edc221bc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.559435] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 985.559435] env[68437]: value = "task-2944503" [ 985.559435] env[68437]: _type = "Task" [ 985.559435] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.570952] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.657197] env[68437]: DEBUG nova.compute.manager [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 985.657475] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.658531] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba18d00-d356-422b-a4a9-f6addf2d1719 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.668164] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.668906] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8642cca6-8bec-4449-8566-0fabec69c418 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.679024] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 985.679024] env[68437]: value = "task-2944504" [ 985.679024] env[68437]: _type = "Task" [ 985.679024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.695883] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944504, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.745539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.745852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.747283] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944501, 'name': PowerOffVM_Task, 'duration_secs': 0.122764} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.747956] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.748331] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.748529] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b448043e-1035-48e2-9027-f67ddfd4d0ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.780485] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.781612] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.781612] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Deleting the datastore file [datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.781612] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2290c044-05e4-449e-8517-a63d7851e19e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.793671] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for the task: (returnval){ [ 985.793671] env[68437]: value = "task-2944506" [ 985.793671] env[68437]: _type = "Task" [ 985.793671] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.802490] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944506, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.903830] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.016s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.907816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.706s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.908186] env[68437]: DEBUG nova.objects.instance [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lazy-loading 'resources' on Instance uuid 1186da93-57aa-40f4-8aae-702d039844d4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.930031] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944502, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.954035] env[68437]: INFO nova.scheduler.client.report [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted allocations for instance d5db3112-88c7-43af-a434-b91ca69f8559 [ 985.983529] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528b8df5-9b68-576b-5e95-2f802536823e, 'name': SearchDatastore_Task, 'duration_secs': 0.014975} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.983922] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.984185] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.984451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.984575] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.984964] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.985063] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2e7bc63-37fc-4f1e-9b79-8092d28aea95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.002444] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 986.002444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 986.002444] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6a03ff2-f291-4156-88fe-5dc08647227a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.005803] env[68437]: DEBUG oslo_concurrency.lockutils [req-379e3841-f477-45e5-96c1-31b671851e82 req-86425942-45c8-4ed4-a849-8840f4e4f1d0 service nova] Releasing lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.011653] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 986.011653] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5284c1e5-22c9-a4e0-c6aa-22496b7824ce" [ 986.011653] env[68437]: _type = "Task" [ 986.011653] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.024753] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5284c1e5-22c9-a4e0-c6aa-22496b7824ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.073101] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944503, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.190387] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944504, 'name': PowerOffVM_Task, 'duration_secs': 0.290571} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.190699] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.190867] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 986.191150] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f103dcc-0707-43c3-bc99-7c352483b1f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.249022] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 986.275772] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 986.276039] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 986.276238] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore1] d84c599e-29b2-45ec-a3f7-54ef85af9a3d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.276637] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e0309db-d543-49b9-953e-271c1b8f746d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.290140] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 986.290140] env[68437]: value = "task-2944508" [ 986.290140] env[68437]: _type = "Task" [ 986.290140] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.303697] env[68437]: DEBUG oslo_vmware.api [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Task: {'id': task-2944506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44365} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.307947] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.308143] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.308328] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.308506] env[68437]: INFO nova.compute.manager [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Took 1.09 seconds to destroy the instance on the hypervisor. [ 986.308759] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.308974] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.310411] env[68437]: DEBUG nova.compute.manager [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 986.312191] env[68437]: DEBUG nova.network.neutron [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 986.340813] env[68437]: DEBUG nova.network.neutron [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 986.428927] env[68437]: DEBUG oslo_vmware.api [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944502, 'name': PowerOnVM_Task, 'duration_secs': 0.670166} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.429495] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 986.429495] env[68437]: INFO nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Took 6.94 seconds to spawn the instance on the hypervisor. [ 986.430341] env[68437]: DEBUG nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.430789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0802850f-ddbb-42ed-aeba-80437769828c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.466489] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9697f13a-db35-4d14-8c74-411fbd1bc3ff tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "d5db3112-88c7-43af-a434-b91ca69f8559" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.161s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.525496] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5284c1e5-22c9-a4e0-c6aa-22496b7824ce, 'name': SearchDatastore_Task, 'duration_secs': 0.025534} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.528937] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11b6801a-a94c-4188-91c1-7a65facc7e76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.535804] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 986.535804] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52be1f2d-b391-e7d2-55cf-ac3f828e477e" [ 986.535804] env[68437]: _type = "Task" [ 986.535804] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.546365] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52be1f2d-b391-e7d2-55cf-ac3f828e477e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.572499] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684037} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.572499] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ada623a8-b0ce-4709-b2af-ad80b464af4e/ada623a8-b0ce-4709-b2af-ad80b464af4e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 986.572499] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 986.575267] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7488496d-1174-4cc2-9431-ca604a712ccb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.583163] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 986.583163] env[68437]: value = "task-2944509" [ 986.583163] env[68437]: _type = "Task" [ 986.583163] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.599141] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.739110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ddacd2-e71a-47e4-b9ef-54f19f692cfe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.749707] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dfac55-da14-4ecd-8200-57ab5ab390d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.792332] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.793226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053ab817-641a-49a0-b993-3923f1b81565 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.807615] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648c1301-186d-4b03-ab4f-1365b95eb7d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.811547] env[68437]: DEBUG oslo_vmware.api [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267248} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.811806] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.811988] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.812199] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.812351] env[68437]: INFO nova.compute.manager [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Took 1.15 seconds to destroy the instance on the hypervisor. 
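The destroy sequences recorded above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, then "Instance destroyed") follow the usual oslo.vmware pattern: invoke a method on a vSphere managed object through the session and, for task-returning calls, poll the task until it completes. A minimal illustrative sketch of that pattern only, not Nova's actual driver code; the vCenter address, credentials, vm_ref, dc_ref and datastore path are hypothetical placeholders.

```python
# Sketch of the power-off / unregister / delete-files flow seen in the log,
# written directly against oslo.vmware. All concrete values are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'password',        # placeholder endpoint/creds
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
vm_ref = None   # placeholder: real code looks this up via the PropertyCollector
dc_ref = None   # placeholder: datacenter managed-object reference

# Power off the VM and wait for the vSphere task to finish
# (the "PowerOffVM_Task ... progress is 0%" / "completed successfully" entries).
task = session.invoke_api(vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

# Unregister the VM; this call is not a task and returns immediately.
session.invoke_api(vim, 'UnregisterVM', vm_ref)

# Delete the instance directory from the datastore
# (the FileManager.DeleteDatastoreFile_Task entries).
file_manager = vim.service_content.fileManager
task = session.invoke_api(
    vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5',  # placeholder path
    datacenter=dc_ref)
session.wait_for_task(task)
```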
[ 986.812584] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.813846] env[68437]: DEBUG nova.compute.manager [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 986.813937] env[68437]: DEBUG nova.network.neutron [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 986.826210] env[68437]: DEBUG nova.compute.provider_tree [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.847390] env[68437]: DEBUG nova.network.neutron [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.956141] env[68437]: INFO nova.compute.manager [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Took 26.93 seconds to build instance. [ 987.051345] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52be1f2d-b391-e7d2-55cf-ac3f828e477e, 'name': SearchDatastore_Task, 'duration_secs': 0.01668} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.051345] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.051345] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 987.051345] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1479519e-2d0c-49da-9c03-72748df30456 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.058174] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 987.058174] env[68437]: value = "task-2944510" [ 987.058174] env[68437]: _type = "Task" [ 987.058174] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.067341] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.094173] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078009} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.094173] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.094766] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fe958c-5580-444b-87d9-f5b1f14a5254 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.124020] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] ada623a8-b0ce-4709-b2af-ad80b464af4e/ada623a8-b0ce-4709-b2af-ad80b464af4e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.126791] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e940262-f370-4e14-9ded-e77be51ecced {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.152183] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 987.152183] env[68437]: value = "task-2944511" [ 987.152183] env[68437]: _type = "Task" [ 987.152183] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.164048] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944511, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.228472] env[68437]: DEBUG nova.compute.manager [req-0e379156-156a-4b68-be55-0787fafb8ad5 req-aa182e43-d431-4e6d-b155-529a91c681b4 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Received event network-vif-deleted-9260c00b-559c-42b0-8f99-ffca47f422d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 987.228601] env[68437]: INFO nova.compute.manager [req-0e379156-156a-4b68-be55-0787fafb8ad5 req-aa182e43-d431-4e6d-b155-529a91c681b4 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Neutron deleted interface 9260c00b-559c-42b0-8f99-ffca47f422d7; detaching it from the instance and deleting it from the info cache [ 987.229285] env[68437]: DEBUG nova.network.neutron [req-0e379156-156a-4b68-be55-0787fafb8ad5 req-aa182e43-d431-4e6d-b155-529a91c681b4 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.329594] env[68437]: DEBUG nova.scheduler.client.report [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.349008] env[68437]: INFO nova.compute.manager [-] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Took 1.04 seconds to deallocate network for instance. 
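The "Inventory has not changed for provider ... based on inventory data: {...}" entries show the resource tracker comparing the provider's VCPU / MEMORY_MB / DISK_GB inventory against what the placement service already holds; when the data does differ, the report client pushes the same structure to placement. A rough sketch of that update, assuming a hypothetical placement endpoint, auth token, provider generation and microversion; only the provider UUID and the inventory values are taken from the log.

```python
# Sketch of how the inventory dict logged above maps onto the placement API.
# Endpoint, token, generation and microversion are hypothetical placeholders.
import requests

PLACEMENT = 'http://placement.example.test/placement'   # placeholder endpoint
PROVIDER = '422e986f-b38b-46ad-94b3-91f3ccd10a05'        # provider UUID from the log
TOKEN = 'gAAAA...'                                       # placeholder token

inventories = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155,
                'step_size': 1, 'allocation_ratio': 1.0},
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
    headers={'X-Auth-Token': TOKEN,
             'OpenStack-API-Version': 'placement 1.26'},  # placeholder microversion
    json={'resource_provider_generation': 42,             # placeholder generation
          'inventories': inventories})
resp.raise_for_status()
```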
[ 987.360182] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "05e07d7c-0161-463c-89f7-1bf28f680bde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.360707] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.397094] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "2f368262-0825-4ccc-9b1e-523b705bcfce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.397405] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.397643] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.398112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.398347] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.400786] env[68437]: INFO nova.compute.manager [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Terminating instance [ 987.458401] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-ab9495ef-63f8-460a-902b-4983732fd2ec tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "7422ff70-901c-4343-9b9f-f12c52348d2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.442s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.572939] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944510, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.645641] env[68437]: INFO nova.compute.manager [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Rebuilding instance [ 987.664709] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944511, 'name': ReconfigVM_Task, 'duration_secs': 0.333806} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.665057] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Reconfigured VM instance instance-00000048 to attach disk [datastore2] ada623a8-b0ce-4709-b2af-ad80b464af4e/ada623a8-b0ce-4709-b2af-ad80b464af4e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.667721] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af3854ff-ec21-4bc1-bdc9-d6f63f4537e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.678766] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 987.678766] env[68437]: value = "task-2944512" [ 987.678766] env[68437]: _type = "Task" [ 987.678766] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.689570] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944512, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.700351] env[68437]: DEBUG nova.compute.manager [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.701132] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58df0c03-25b2-49dd-abee-0b0f75ff1744 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.705534] env[68437]: DEBUG nova.network.neutron [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.734743] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d760940f-56c3-41ad-beca-2989323e788d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.745465] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc71d28-fe46-4df4-96f0-b0a83a944ff8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.784819] env[68437]: DEBUG nova.compute.manager [req-0e379156-156a-4b68-be55-0787fafb8ad5 req-aa182e43-d431-4e6d-b155-529a91c681b4 service nova] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Detach interface failed, port_id=9260c00b-559c-42b0-8f99-ffca47f422d7, reason: Instance d84c599e-29b2-45ec-a3f7-54ef85af9a3d could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 987.837361] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.838683] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.618s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.839224] env[68437]: DEBUG nova.objects.instance [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 987.859404] env[68437]: INFO nova.scheduler.client.report [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Deleted allocations for instance 1186da93-57aa-40f4-8aae-702d039844d4 [ 987.864221] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.864221] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 987.907029] env[68437]: DEBUG nova.compute.manager [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 987.907029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 987.910029] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0301928-cfd8-4010-be30-8e4ec0a20ada {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.922943] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.925035] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eae200cf-a5de-410e-a009-244505e73279 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.934227] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 987.934227] env[68437]: value = "task-2944513" [ 987.934227] env[68437]: _type = "Task" [ 987.934227] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.946224] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.072417] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714894} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.072772] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.073018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.073292] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43e26700-d320-4ddb-a7a4-55572e60e9f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.081896] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 988.081896] env[68437]: value = "task-2944514" [ 988.081896] env[68437]: _type = "Task" [ 988.081896] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.092666] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.198023] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944512, 'name': Rename_Task, 'duration_secs': 0.17274} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.198023] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.198023] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a82b76d5-e85e-4d08-848e-802657480c04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.209161] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 988.209161] env[68437]: value = "task-2944515" [ 988.209161] env[68437]: _type = "Task" [ 988.209161] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.209161] env[68437]: INFO nova.compute.manager [-] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Took 1.39 seconds to deallocate network for instance. [ 988.230137] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.348231] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 988.349207] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25c6bae-e219-4a4b-af80-f6dfd05b1252 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.353322] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5a656df3-caaf-442e-978a-1e3540cd70db tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.515s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.354851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.369s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.354851] env[68437]: DEBUG nova.objects.instance [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lazy-loading 'resources' on Instance uuid 4254002c-d292-4f10-a3d0-387853dbbcb3 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 988.360460] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 988.360792] env[68437]: ERROR oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk due to incomplete transfer. 
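[editor's note] The repeated "Waiting for the task: (returnval){ ... } to complete" / "progress is N%" / "completed successfully" entries above and below trace the driver submitting an asynchronous vCenter task (PowerOffVM_Task, CopyVirtualDisk_Task, Destroy_Task, ...) and then polling it until it reaches a terminal state. The sketch below is a minimal, self-contained illustration of that polling loop only; it is not the oslo_vmware implementation, and the names poll_task, TaskInfo, wait_for_task and TaskFailed are assumptions made for the example.

# Simplified sketch of the task-polling loop that produces the
# "Waiting for the task ... / progress is N% / completed successfully"
# lines in this log. All names here are illustrative, not oslo_vmware API.
import time
from dataclasses import dataclass


class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""


@dataclass
class TaskInfo:
    state: str          # 'queued', 'running', 'success' or 'error'
    progress: int = 0   # percent complete while still running
    error: str = ""     # server-side error message, if any


def wait_for_task(poll_task, task_id, interval=0.5):
    """Poll poll_task(task_id) until the task finishes.

    poll_task stands in for a property-collector query against the task
    managed object; each iteration corresponds to one "progress is N%"
    DEBUG line in the log above.
    """
    while True:
        info = poll_task(task_id)
        if info.state == "success":
            return info                      # "... completed successfully."
        if info.state == "error":
            raise TaskFailed(info.error)
        print(f"Task {task_id} progress is {info.progress}%")
        time.sleep(interval)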
[ 988.361174] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-45e6e7e8-34b1-472e-b153-8f1e447e8db5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.373556] env[68437]: DEBUG oslo_vmware.rw_handles [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5247bf8e-6c14-58e9-0211-daf1b4abdea4/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 988.373823] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Uploaded image 7ae3c924-c7df-4c0e-b7ec-616d7490ffd0 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 988.376235] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 988.376778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d46b28b-0bf1-4f20-b67e-b4922bef257e tempest-MigrationsAdminTest-854692231 tempest-MigrationsAdminTest-854692231-project-member] Lock "1186da93-57aa-40f4-8aae-702d039844d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.827s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.381284] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-15fe07c4-9a8e-4d51-9cba-21e45939000e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.390799] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 988.390799] env[68437]: value = "task-2944516" [ 988.390799] env[68437]: _type = "Task" [ 988.390799] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.395680] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.401789] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944516, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.446900] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944513, 'name': PowerOffVM_Task, 'duration_secs': 0.213386} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.447321] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 988.448228] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 988.448555] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71bedfcd-e050-4f51-a870-82976f3127f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.533694] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 988.534128] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 988.534128] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleting the datastore file [datastore2] 2f368262-0825-4ccc-9b1e-523b705bcfce {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.534392] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e4b5b79-1dab-4c8a-a8ee-c11f3234abb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.541019] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for the task: (returnval){ [ 988.541019] env[68437]: value = "task-2944518" [ 988.541019] env[68437]: _type = "Task" [ 988.541019] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.549405] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.592038] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147252} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.593031] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.593136] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4948d029-5778-4946-a920-bcf4b9daab64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.623393] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.624009] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bdcd913-b639-4613-9dc3-486e2d897165 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.650895] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 988.650895] env[68437]: value = "task-2944519" [ 988.650895] env[68437]: _type = "Task" [ 988.650895] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.660933] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944519, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.721072] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944515, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.725433] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.725433] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 988.725433] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-020023de-b649-45f1-a748-2b53a9937c50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.734723] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 988.734723] env[68437]: value = "task-2944520" [ 988.734723] env[68437]: _type = "Task" [ 988.734723] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.744091] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944520, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.902030] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944516, 'name': Destroy_Task, 'duration_secs': 0.435824} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.905108] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Destroyed the VM [ 988.905108] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 988.905108] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-50eed410-00b8-4e23-aaa3-ea3dae827faa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.914573] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 988.914573] env[68437]: value = "task-2944521" [ 988.914573] env[68437]: _type = "Task" [ 988.914573] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.928628] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944521, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.055724] env[68437]: DEBUG oslo_vmware.api [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Task: {'id': task-2944518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203701} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.056063] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.056383] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.056526] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.056725] env[68437]: INFO nova.compute.manager [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Took 1.15 seconds to destroy the instance on the hypervisor. [ 989.057127] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.057344] env[68437]: DEBUG nova.compute.manager [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.057630] env[68437]: DEBUG nova.network.neutron [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 989.164071] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944519, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.197666] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7420bdf-9283-43a0-9025-f2ac3bc8402f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.207970] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3869d072-2148-40c1-9994-647026c2715c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.221901] env[68437]: DEBUG oslo_vmware.api [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944515, 'name': PowerOnVM_Task, 'duration_secs': 0.802313} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.251289] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.251636] env[68437]: INFO nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Took 12.19 seconds to spawn the instance on the hypervisor. [ 989.252041] env[68437]: DEBUG nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.253662] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa2a536-14e9-451a-a1e9-ceecaf8139af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.259606] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027d2fe7-920d-4e28-8068-a696ac94c8ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.276029] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944520, 'name': PowerOffVM_Task, 'duration_secs': 0.240175} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.276283] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.276523] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.277809] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc86b922-a677-4412-8e87-906fe8de4649 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.282593] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260d5c1c-84cb-47e2-a695-fae8e87525cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.296933] env[68437]: DEBUG nova.compute.provider_tree [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.300275] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.300802] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05b14921-9393-4d5d-8012-7c4b4e3adf5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.329065] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.333338] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.333338] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Deleting the datastore file [datastore1] 7422ff70-901c-4343-9b9f-f12c52348d2c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.333338] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-e8cc2745-f38f-4b2c-8df5-ec620988ac1a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.335958] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 989.335958] env[68437]: value = "task-2944523" [ 989.335958] env[68437]: _type = "Task" [ 989.335958] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.344080] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.429785] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944521, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.467732] env[68437]: DEBUG nova.compute.manager [req-e2ed358c-083a-4b1c-ab0b-5fc0fe1b6187 req-cbec8dbc-d184-428d-82af-9f1ca3929fdd service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Received event network-vif-deleted-a915dbf0-9e3f-41da-b43b-dd0a4225b839 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 989.467732] env[68437]: INFO nova.compute.manager [req-e2ed358c-083a-4b1c-ab0b-5fc0fe1b6187 req-cbec8dbc-d184-428d-82af-9f1ca3929fdd service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Neutron deleted interface a915dbf0-9e3f-41da-b43b-dd0a4225b839; detaching it from the instance and deleting it from the info cache [ 989.468104] env[68437]: DEBUG nova.network.neutron [req-e2ed358c-083a-4b1c-ab0b-5fc0fe1b6187 req-cbec8dbc-d184-428d-82af-9f1ca3929fdd service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.661936] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944519, 'name': ReconfigVM_Task, 'duration_secs': 0.624894} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.662240] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.662864] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6332fa40-fc5d-469c-88f1-c4ae9786e223 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.669658] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 989.669658] env[68437]: value = "task-2944524" [ 989.669658] env[68437]: _type = "Task" [ 989.669658] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.677557] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944524, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.787757] env[68437]: INFO nova.compute.manager [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Took 30.46 seconds to build instance. [ 989.802492] env[68437]: DEBUG nova.scheduler.client.report [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.847291] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155334} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.847573] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.847798] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.847989] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.928906] env[68437]: DEBUG oslo_vmware.api [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944521, 'name': RemoveSnapshot_Task, 'duration_secs': 0.951504} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.929339] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 989.929581] env[68437]: INFO nova.compute.manager [None req-61f0e418-6f91-47f5-8c1e-f0d43e64b60c tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 15.62 seconds to snapshot the instance on the hypervisor. [ 989.944404] env[68437]: DEBUG nova.network.neutron [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.970834] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df110937-4cc5-4135-94b6-2b90dcdbe3a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.982398] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424dee75-fec8-43e7-bd02-28a855717d35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.029316] env[68437]: DEBUG nova.compute.manager [req-e2ed358c-083a-4b1c-ab0b-5fc0fe1b6187 req-cbec8dbc-d184-428d-82af-9f1ca3929fdd service nova] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Detach interface failed, port_id=a915dbf0-9e3f-41da-b43b-dd0a4225b839, reason: Instance 2f368262-0825-4ccc-9b1e-523b705bcfce could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 990.181048] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944524, 'name': Rename_Task, 'duration_secs': 0.159562} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.181048] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.181048] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46167152-5978-4e5e-ac54-0b19a8107796 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.189753] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 990.189753] env[68437]: value = "task-2944525" [ 990.189753] env[68437]: _type = "Task" [ 990.189753] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.201753] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944525, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.289833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-38cf7303-9c58-413a-a9db-ebe260ae6e3f tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.980s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.309122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.313565] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.521s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.315041] env[68437]: INFO nova.compute.claims [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.335874] env[68437]: INFO nova.scheduler.client.report [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleted allocations for instance 4254002c-d292-4f10-a3d0-387853dbbcb3 [ 990.447644] env[68437]: INFO nova.compute.manager [-] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Took 1.39 seconds to deallocate network for instance. 
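[editor's note] The lockutils entries in this stretch ("Acquiring lock ...", "acquired ... waited 3.521s", "released ... held 31.980s") record how long each caller waited for a named lock and how long it then held it. The sketch below reproduces that wait/held bookkeeping with a plain threading.Lock as a loose illustration of the pattern only; it is not oslo_concurrency's implementation, and guarded_lock is an invented helper name.

# Illustrative reimplementation of the wait/held timing that the
# oslo_concurrency.lockutils entries report. guarded_lock is a made-up
# helper, not part of oslo_concurrency.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


def _named_lock(name):
    """Return (creating on first use) the process-local lock for name."""
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def guarded_lock(name, caller):
    lock = _named_lock(name)
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - acquired
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Example: serializing resource claims the way the log entries show.
with guarded_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass  # claim / usage update would run here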
[ 990.623740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.624068] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.624299] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.624490] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.624692] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.627381] env[68437]: INFO nova.compute.manager [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Terminating instance [ 990.700075] env[68437]: DEBUG oslo_vmware.api [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944525, 'name': PowerOnVM_Task, 'duration_secs': 0.460174} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.700313] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.701492] env[68437]: INFO nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Took 8.77 seconds to spawn the instance on the hypervisor. [ 990.701492] env[68437]: DEBUG nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.701492] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dea3f4-db59-40d6-880e-ac472c46848a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.847032] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b1b16d9-0835-4354-b1ba-8e3a0cb46c25 tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "4254002c-d292-4f10-a3d0-387853dbbcb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.588s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.885367] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 990.885930] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.886036] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 990.886334] env[68437]: 
DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.886597] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 990.887060] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 990.887826] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 990.887826] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 990.888045] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 990.888410] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 990.888589] env[68437]: DEBUG nova.virt.hardware [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 990.890343] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8ae4d6-b4c6-4daf-85fc-3c1b642b16b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.916809] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbad7640-525f-4f77-a397-81b67a022d4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.932291] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.938082] 
env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.938383] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.938963] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02964bb8-f0cd-4e97-8fe8-04aefd936fa4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.954116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.959222] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.959222] env[68437]: value = "task-2944526" [ 990.959222] env[68437]: _type = "Task" [ 990.959222] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.968214] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944526, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.131620] env[68437]: DEBUG nova.compute.manager [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 991.131871] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.132851] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa146a7-7798-401b-bbb1-462f492569ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.142648] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.142917] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6199283-5fe2-433e-9478-77c6aad68c38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.149459] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 991.149459] env[68437]: value = "task-2944527" [ 991.149459] env[68437]: _type = "Task" [ 991.149459] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.158385] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944527, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.221048] env[68437]: INFO nova.compute.manager [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Took 29.46 seconds to build instance. [ 991.328027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.328027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.469925] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944526, 'name': CreateVM_Task, 'duration_secs': 0.453038} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.472439] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 991.473196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.473450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.473866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 991.474654] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2465488-0ee6-4546-9f30-b9b85f1f6a09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.479724] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 991.479724] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528a21ef-97d8-59e2-3ef4-05c1b3cc26f7" [ 991.479724] env[68437]: _type = "Task" [ 991.479724] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.493385] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528a21ef-97d8-59e2-3ef4-05c1b3cc26f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.602539] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd8d03d-2d8c-4aa2-b6a7-7fa7d1e7d3fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.610991] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e6d5e0-e87a-4831-b2df-c56ddcbc6a16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.641310] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421530d1-420f-4d04-a1da-18a3c58e6a77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.649111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a317a6b-0300-45a3-bdbc-c9d5bfcf9f79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.662395] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944527, 'name': PowerOffVM_Task, 'duration_secs': 0.219791} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.670069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.670254] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 991.670740] env[68437]: DEBUG nova.compute.provider_tree [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.672099] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4faabc1-921f-4902-90f0-c2ad3128dafd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.673693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.673909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 
tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.674116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.674298] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.674458] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.676416] env[68437]: INFO nova.compute.manager [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Terminating instance [ 991.722968] env[68437]: DEBUG oslo_concurrency.lockutils [None req-24e139f6-f217-4d2e-90dd-362d6a5e7fc2 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.971s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.788942] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 991.789192] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 991.789383] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Deleting the datastore file [datastore2] ada623a8-b0ce-4709-b2af-ad80b464af4e {{(pid=68437) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.789711] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-700d7b60-da84-4e39-a288-5866267896a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.797078] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for the task: (returnval){ [ 991.797078] env[68437]: value = "task-2944529" [ 991.797078] env[68437]: _type = "Task" [ 991.797078] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.805110] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.830154] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 991.990180] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528a21ef-97d8-59e2-3ef4-05c1b3cc26f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011823} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.990180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.990536] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.990667] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.990759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.990939] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.991216] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94f36b83-44c4-40e7-b8cc-5778e6ec5db3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.995240] env[68437]: DEBUG nova.compute.manager [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.996042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c47a070-afa7-4c7a-b73f-69b26cdcd80a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.000141] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.000321] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.001060] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24c1f343-55b2-4ddd-b484-61ed156fb56c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.010372] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 992.010372] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2c0ac-3845-4967-68f0-1db04296c410" [ 992.010372] env[68437]: _type = "Task" [ 992.010372] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.018308] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2c0ac-3845-4967-68f0-1db04296c410, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.174737] env[68437]: DEBUG nova.scheduler.client.report [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.179892] env[68437]: DEBUG nova.compute.manager [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 992.180118] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.180962] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221649da-ac52-4017-b5d5-0095396f737d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.189267] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.190087] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41a21e75-e289-43c7-a191-ccd7521ba615 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.196858] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 992.196858] env[68437]: value = "task-2944530" [ 992.196858] env[68437]: _type = "Task" [ 992.196858] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.205100] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944530, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.308086] env[68437]: DEBUG oslo_vmware.api [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Task: {'id': task-2944529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177993} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.308380] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.308571] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.308751] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.308928] env[68437]: INFO nova.compute.manager [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Took 1.18 seconds to destroy the instance on the hypervisor. [ 992.309192] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.309389] env[68437]: DEBUG nova.compute.manager [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 992.309485] env[68437]: DEBUG nova.network.neutron [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 992.348495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.408824] env[68437]: DEBUG nova.compute.manager [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 992.508996] env[68437]: INFO nova.compute.manager [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] instance snapshotting [ 992.519176] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97e7b0e-598c-41b5-98ee-d40b89da4ff3 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.526807] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d2c0ac-3845-4967-68f0-1db04296c410, 'name': SearchDatastore_Task, 'duration_secs': 0.010209} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.549944] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6041534-feef-49ec-9c17-3ba9b43b7ea5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.553461] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fb287f-3f2d-452b-a97e-d6b7c27d9be2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.566323] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 992.566323] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52221925-9fa5-1ea5-aef5-49946362823c" [ 992.566323] env[68437]: _type = "Task" [ 992.566323] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.574024] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52221925-9fa5-1ea5-aef5-49946362823c, 'name': SearchDatastore_Task, 'duration_secs': 0.01135} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.574426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.574757] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.575085] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52821dc4-145a-4e2a-811f-6535a5d53c24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.583577] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 992.583577] env[68437]: value = "task-2944531" [ 992.583577] env[68437]: _type = "Task" [ 992.583577] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.591981] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.681072] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.681072] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 992.684602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.685314] env[68437]: DEBUG nova.objects.instance [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lazy-loading 'resources' on Instance uuid 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.711288] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944530, 'name': PowerOffVM_Task, 'duration_secs': 0.386117} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.711642] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.711902] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.712257] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0517cdd5-bf1d-4476-b495-13a891775f02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.784086] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.784425] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.784663] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleting the datastore file [datastore1] 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.785366] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1743090-7aec-4a89-823d-9e6571d4b196 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.794204] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for the task: (returnval){ [ 992.794204] env[68437]: value = "task-2944533" [ 992.794204] env[68437]: _type = "Task" [ 992.794204] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.803303] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944533, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.858355] env[68437]: DEBUG nova.compute.manager [req-6ecef3bf-f44a-4c50-a323-f00ad37ed77e req-d3f18c57-a405-41eb-9668-9060ab980ee7 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-vif-deleted-15bbe98e-d7a1-4f4f-929a-e9c9e1b91362 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 992.858575] env[68437]: INFO nova.compute.manager [req-6ecef3bf-f44a-4c50-a323-f00ad37ed77e req-d3f18c57-a405-41eb-9668-9060ab980ee7 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Neutron deleted interface 15bbe98e-d7a1-4f4f-929a-e9c9e1b91362; detaching it from the instance and deleting it from the info cache [ 992.858837] env[68437]: DEBUG nova.network.neutron [req-6ecef3bf-f44a-4c50-a323-f00ad37ed77e req-d3f18c57-a405-41eb-9668-9060ab980ee7 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updating instance_info_cache with network_info: [{"id": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "address": "fa:16:3e:76:bd:87", "network": {"id": "b3af08e7-2e52-4ea7-8f47-d3269140178e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-695282770", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "376f884a99d6438aa53e3df5b9c34450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27913afc-65", "ovs_interfaceid": "27913afc-65e0-4710-a03a-e1f99ee3ff22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.871231] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.871547] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.871834] env[68437]: DEBUG nova.compute.manager [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.872856] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647a09ce-601e-4ce4-b936-e994dcb1e948 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.881991] env[68437]: DEBUG nova.compute.manager [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 992.882760] env[68437]: DEBUG nova.objects.instance [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'flavor' on Instance uuid 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.929472] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.067030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 993.067364] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce4eebe8-21c4-43e1-84e6-7f6c832a13ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.076695] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 993.076695] env[68437]: value = "task-2944534" [ 993.076695] env[68437]: _type = "Task" [ 993.076695] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.086065] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944534, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.097079] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944531, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44608} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.097486] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 993.097614] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 993.097957] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-731a022f-e2f6-40a9-84d3-f6bb1340cd2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.106676] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 993.106676] env[68437]: value = "task-2944535" [ 993.106676] env[68437]: _type = "Task" [ 993.106676] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.115550] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.188565] env[68437]: DEBUG nova.compute.utils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.193174] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 993.193404] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 993.245934] env[68437]: DEBUG nova.policy [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec1074dd1b444e45beadcccfe6671c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f1c3ca0e78f472e8c127fa68ed610f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 993.308928] env[68437]: DEBUG oslo_vmware.api [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Task: {'id': task-2944533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303199} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.309328] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.309530] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.309731] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.309977] env[68437]: INFO nova.compute.manager [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 993.310252] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.310450] env[68437]: DEBUG nova.compute.manager [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 993.310547] env[68437]: DEBUG nova.network.neutron [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 993.367048] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d96d3b74-fdd0-4fa5-9878-8b9570ed6072 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.380057] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f20d863-101e-4db1-b376-74758700134a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.422420] env[68437]: DEBUG nova.compute.manager [req-6ecef3bf-f44a-4c50-a323-f00ad37ed77e req-d3f18c57-a405-41eb-9668-9060ab980ee7 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Detach interface failed, port_id=15bbe98e-d7a1-4f4f-929a-e9c9e1b91362, reason: Instance ada623a8-b0ce-4709-b2af-ad80b464af4e could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 993.494223] env[68437]: DEBUG nova.network.neutron [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.522458] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b720fc-4f39-4c46-964e-10885fcc9f9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.530946] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Successfully created port: ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.534750] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78ca921-8c1b-4346-a84b-508bfae0a79d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.572116] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56065433-7506-4fcc-a7d2-2035d1e3bd48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.585464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfb5a61-1989-4484-a804-28ea2e7e464e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.595363] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944534, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.604353] env[68437]: DEBUG nova.compute.provider_tree [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.618126] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078508} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.618126] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.618126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0040cfb-46f0-44f8-b07a-998c7971a2ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.641310] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.643319] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17611e45-aad5-47b9-88a1-faabeb9d5fa3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.660461] env[68437]: DEBUG nova.compute.manager [req-7c6ca887-23cb-4a82-84de-53e7bcb10139 req-cffc14d8-2ed3-4340-9d96-bdcf0f1e1428 service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Received event network-vif-deleted-77a40424-4554-49db-9885-dbac2faad783 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 993.660687] env[68437]: INFO nova.compute.manager [req-7c6ca887-23cb-4a82-84de-53e7bcb10139 req-cffc14d8-2ed3-4340-9d96-bdcf0f1e1428 service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Neutron deleted interface 77a40424-4554-49db-9885-dbac2faad783; detaching it from the instance and deleting it from the info cache [ 993.660872] env[68437]: DEBUG nova.network.neutron [req-7c6ca887-23cb-4a82-84de-53e7bcb10139 req-cffc14d8-2ed3-4340-9d96-bdcf0f1e1428 service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.671241] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 993.671241] 
env[68437]: value = "task-2944536" [ 993.671241] env[68437]: _type = "Task" [ 993.671241] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.681762] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944536, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.693981] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 993.896624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.897051] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff2d9cf1-7898-46ed-8190-1dd12de4db9b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.916548] env[68437]: DEBUG oslo_vmware.api [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 993.916548] env[68437]: value = "task-2944537" [ 993.916548] env[68437]: _type = "Task" [ 993.916548] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.929022] env[68437]: DEBUG oslo_vmware.api [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.997066] env[68437]: INFO nova.compute.manager [-] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Took 1.69 seconds to deallocate network for instance. [ 994.077455] env[68437]: DEBUG nova.network.neutron [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.090910] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944534, 'name': CreateSnapshot_Task, 'duration_secs': 0.594033} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.091206] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 994.092115] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff5eb1f-2c7d-4c41-a1a1-6189ad25faf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.111231] env[68437]: DEBUG nova.scheduler.client.report [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.164521] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93c9be1d-398c-4717-ae9c-162e43908991 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.180922] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48100d1d-b8a6-4353-adeb-545b9e1a75c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.210605] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944536, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.223291] env[68437]: DEBUG nova.compute.manager [req-7c6ca887-23cb-4a82-84de-53e7bcb10139 req-cffc14d8-2ed3-4340-9d96-bdcf0f1e1428 service nova] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Detach interface failed, port_id=77a40424-4554-49db-9885-dbac2faad783, reason: Instance 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 994.429022] env[68437]: DEBUG oslo_vmware.api [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944537, 'name': PowerOffVM_Task, 'duration_secs': 0.230063} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.429340] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.429542] env[68437]: DEBUG nova.compute.manager [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 994.430380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9240fce0-8463-4d93-92ed-a7af0782e924 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.510612] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.578615] env[68437]: INFO nova.compute.manager [-] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Took 1.27 seconds to deallocate network for instance. [ 994.619248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.629136] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 994.629442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.234s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.631218] env[68437]: INFO nova.compute.claims [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.634287] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3dfd2019-1418-48f3-9531-9bfcfa179fb5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.644248] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba 
tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 994.644248] env[68437]: value = "task-2944538" [ 994.644248] env[68437]: _type = "Task" [ 994.644248] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.648650] env[68437]: INFO nova.scheduler.client.report [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Deleted allocations for instance 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5 [ 994.658150] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944538, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.683574] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944536, 'name': ReconfigVM_Task, 'duration_secs': 0.699483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.683884] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 7422ff70-901c-4343-9b9f-f12c52348d2c/7422ff70-901c-4343-9b9f-f12c52348d2c.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.684516] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-165e6d3b-c865-4d9b-ada1-8a7d542a81ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.691242] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 994.691242] env[68437]: value = "task-2944539" [ 994.691242] env[68437]: _type = "Task" [ 994.691242] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.701039] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944539, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.724453] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 994.753709] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 994.754078] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.754235] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 994.754390] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.754535] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 994.754685] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 994.754925] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 994.755111] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 994.755365] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Got 1 possible 
topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 994.755535] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 994.755708] env[68437]: DEBUG nova.virt.hardware [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 994.756615] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b10ea3a-119a-4f0c-ac0a-2450843b4c67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.766173] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc008b0a-ba48-4405-a038-372532f27fd5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.883643] env[68437]: DEBUG nova.compute.manager [req-fb16176f-7232-47da-95f6-5c3c53bc24ce req-0f112d2a-3cf4-411b-b7bb-f3aea0cde2d2 service nova] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Received event network-vif-deleted-27913afc-65e0-4710-a03a-e1f99ee3ff22 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 994.942498] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7078ee1e-6c03-420a-b503-e9d4f51abf3a tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.071s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.019357] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "29e9555b-f928-43e7-a3a3-869ed07d7326" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.019526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.019670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.019862] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.020794] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.022386] env[68437]: INFO nova.compute.manager [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Terminating instance [ 995.084865] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.159517] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944538, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.159989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-541f881e-1b24-4aa7-860d-783080fee923 tempest-ServersListShow296Test-1404307527 tempest-ServersListShow296Test-1404307527-project-member] Lock "9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.561s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.202632] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944539, 'name': Rename_Task, 'duration_secs': 0.163869} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.202711] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.202940] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d62bbdc-97f1-41a8-9306-5eaa4c99dffa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.210658] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 995.210658] env[68437]: value = "task-2944540" [ 995.210658] env[68437]: _type = "Task" [ 995.210658] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.221660] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944540, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.526287] env[68437]: DEBUG nova.compute.manager [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.526552] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.526844] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b9e3df8-cc68-4fc6-9301-e7cbdfea215b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.535602] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 995.535602] env[68437]: value = "task-2944541" [ 995.535602] env[68437]: _type = "Task" [ 995.535602] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.545457] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944541, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.614111] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Successfully updated port: ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.656321] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944538, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.724609] env[68437]: DEBUG oslo_vmware.api [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944540, 'name': PowerOnVM_Task, 'duration_secs': 0.480302} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.724906] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.725352] env[68437]: DEBUG nova.compute.manager [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.726060] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3010e864-fab4-49a3-b479-7520b0136349 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.797688] env[68437]: DEBUG nova.objects.instance [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'flavor' on Instance uuid 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.965745] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d267ba7-e470-4b9b-962b-ee4c9c477fb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.973960] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348a141d-04d8-4aa3-94ae-9b902cdd4be3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.014496] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ac7917-cc5d-49c3-889d-3fc5c5176550 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.026837] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d296cfa-ceb2-45fc-a41d-4d7d6db50f63 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.043509] env[68437]: DEBUG nova.compute.provider_tree [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.054419] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944541, 'name': PowerOffVM_Task, 'duration_secs': 0.22254} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.054419] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.054419] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 996.054690] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590922', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'name': 'volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29e9555b-f928-43e7-a3a3-869ed07d7326', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'serial': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 996.055942] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d705866b-ce38-45d1-872d-156e931700e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.076364] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed88ff4-20d2-4c64-882c-a2f67e8f4360 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.084059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4a4da0-1799-4e1d-9dea-b1197e6bb101 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.103215] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c5440c-d244-4517-b093-d8d4c85b66e1 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.120431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.120526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.120647] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 996.122275] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] The volume has not been displaced from its original location: [datastore1] volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7/volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7.vmdk. No consolidation needed. {{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 996.128683] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Reconfiguring VM instance instance-0000002a to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.129821] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-006415d0-3407-45a5-8555-7db80d925164 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.154021] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 996.154021] env[68437]: value = "task-2944542" [ 996.154021] env[68437]: _type = "Task" [ 996.154021] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.159214] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944538, 'name': CloneVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.164318] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944542, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.245972] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.304582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.304792] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.304981] env[68437]: DEBUG nova.network.neutron [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 996.305177] env[68437]: DEBUG nova.objects.instance [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'info_cache' on Instance uuid 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.522556] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "7422ff70-901c-4343-9b9f-f12c52348d2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.522706] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "7422ff70-901c-4343-9b9f-f12c52348d2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.523207] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "7422ff70-901c-4343-9b9f-f12c52348d2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.523207] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock 
"7422ff70-901c-4343-9b9f-f12c52348d2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.523386] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "7422ff70-901c-4343-9b9f-f12c52348d2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.525530] env[68437]: INFO nova.compute.manager [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Terminating instance [ 996.551636] env[68437]: DEBUG nova.scheduler.client.report [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.657456] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944538, 'name': CloneVM_Task, 'duration_secs': 1.579063} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.660204] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Created linked-clone VM from snapshot [ 996.660912] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d4ed58-8f89-4da6-8d15-e1f6f57242d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.665596] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 996.673312] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Uploading image 0d9ddd6f-fb4b-4dd5-bcab-7a4edd9c7848 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 996.677827] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944542, 'name': ReconfigVM_Task, 'duration_secs': 0.271418} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.678099] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Reconfigured VM instance instance-0000002a to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 996.684920] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f9f6e19-cfd4-41ac-a3af-ce1a366c3b95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.702450] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 996.702450] env[68437]: value = "vm-591051" [ 996.702450] env[68437]: _type = "VirtualMachine" [ 996.702450] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 996.702710] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-17575da9-c864-4f4f-937a-8df93389bf79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.705413] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 996.705413] env[68437]: value = "task-2944543" [ 996.705413] env[68437]: _type = "Task" [ 996.705413] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.710032] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lease: (returnval){ [ 996.710032] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7397e-a0af-f267-9fef-c77775376aa0" [ 996.710032] env[68437]: _type = "HttpNfcLease" [ 996.710032] env[68437]: } obtained for exporting VM: (result){ [ 996.710032] env[68437]: value = "vm-591051" [ 996.710032] env[68437]: _type = "VirtualMachine" [ 996.710032] env[68437]: }. 
{{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 996.710301] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the lease: (returnval){ [ 996.710301] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7397e-a0af-f267-9fef-c77775376aa0" [ 996.710301] env[68437]: _type = "HttpNfcLease" [ 996.710301] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 996.713407] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944543, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.719684] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 996.719684] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7397e-a0af-f267-9fef-c77775376aa0" [ 996.719684] env[68437]: _type = "HttpNfcLease" [ 996.719684] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 996.808229] env[68437]: DEBUG nova.objects.base [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Object Instance<8ccd7176-55c0-4118-a07e-3c4bdbba9795> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 996.879153] env[68437]: DEBUG nova.network.neutron [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Updating instance_info_cache with network_info: [{"id": "ae035d33-feaf-43d3-a5ed-93b396819be3", "address": "fa:16:3e:eb:56:72", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae035d33-fe", "ovs_interfaceid": "ae035d33-feaf-43d3-a5ed-93b396819be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.909552] env[68437]: DEBUG nova.compute.manager [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Received event 
network-vif-plugged-ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 996.909835] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Acquiring lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.910115] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.910420] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.910599] env[68437]: DEBUG nova.compute.manager [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] No waiting events found dispatching network-vif-plugged-ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.910817] env[68437]: WARNING nova.compute.manager [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Received unexpected event network-vif-plugged-ae035d33-feaf-43d3-a5ed-93b396819be3 for instance with vm_state building and task_state spawning. [ 996.911033] env[68437]: DEBUG nova.compute.manager [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Received event network-changed-ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 996.911360] env[68437]: DEBUG nova.compute.manager [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Refreshing instance network info cache due to event network-changed-ae035d33-feaf-43d3-a5ed-93b396819be3. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 996.911443] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Acquiring lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.033046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "refresh_cache-7422ff70-901c-4343-9b9f-f12c52348d2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.033385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquired lock "refresh_cache-7422ff70-901c-4343-9b9f-f12c52348d2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.033385] env[68437]: DEBUG nova.network.neutron [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 997.055862] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.056414] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 997.060019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.336s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.060258] env[68437]: DEBUG nova.objects.instance [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'resources' on Instance uuid d84c599e-29b2-45ec-a3f7-54ef85af9a3d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.218106] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944543, 'name': ReconfigVM_Task, 'duration_secs': 0.319017} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.221410] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-590922', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'name': 'volume-d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '29e9555b-f928-43e7-a3a3-869ed07d7326', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7', 'serial': 'd01c66e3-87e9-40bc-95f1-9f03e1a6d5a7'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 997.221675] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.223020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06746f38-03f1-496d-b5b5-802c3ec56a44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.228940] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 997.228940] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7397e-a0af-f267-9fef-c77775376aa0" [ 997.228940] env[68437]: _type = "HttpNfcLease" [ 997.228940] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 997.231015] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 997.231015] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a7397e-a0af-f267-9fef-c77775376aa0" [ 997.231015] env[68437]: _type = "HttpNfcLease" [ 997.231015] env[68437]: }. 
{{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 997.231306] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.232064] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765e4fdd-7c7e-47c5-a27a-d63be05be5cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.234431] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3b164a6-a582-47bc-bf78-09eb3375ce22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.241727] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 997.241937] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk for reading. 
{{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 997.321179] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.321470] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.321808] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Deleting the datastore file [datastore1] 29e9555b-f928-43e7-a3a3-869ed07d7326 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.322154] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0c8f1e2-b129-4d2f-b7b5-3b217c1bbcf2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.330225] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for the task: (returnval){ [ 997.330225] env[68437]: value = "task-2944546" [ 997.330225] env[68437]: _type = "Task" [ 997.330225] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.339300] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944546, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.343978] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3e479e88-976e-4e3f-be2a-304ec7c489d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.378531] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.378856] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Instance network_info: |[{"id": "ae035d33-feaf-43d3-a5ed-93b396819be3", "address": "fa:16:3e:eb:56:72", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae035d33-fe", "ovs_interfaceid": "ae035d33-feaf-43d3-a5ed-93b396819be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 997.379407] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Acquired lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.379622] env[68437]: DEBUG nova.network.neutron [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Refreshing network info cache for port ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 997.380862] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:56:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae035d33-feaf-43d3-a5ed-93b396819be3', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 997.389133] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.391721] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 997.392203] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f2e89c1-2bea-427e-bc26-0c44d5cf4c04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.413601] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 997.413601] env[68437]: value = "task-2944547" [ 997.413601] env[68437]: _type = "Task" [ 997.413601] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.423687] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944547, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.562201] env[68437]: DEBUG nova.compute.utils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 997.568406] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 997.568406] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 997.592848] env[68437]: DEBUG nova.network.neutron [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 997.605125] env[68437]: DEBUG nova.network.neutron [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.675606] env[68437]: DEBUG nova.policy [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 997.681621] env[68437]: DEBUG nova.network.neutron [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Updated VIF entry in instance network info cache for port ae035d33-feaf-43d3-a5ed-93b396819be3. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 997.682177] env[68437]: DEBUG nova.network.neutron [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Updating instance_info_cache with network_info: [{"id": "ae035d33-feaf-43d3-a5ed-93b396819be3", "address": "fa:16:3e:eb:56:72", "network": {"id": "cf5ab765-e353-4d6e-99a1-816d38d3f7ed", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1488403753-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f1c3ca0e78f472e8c127fa68ed610f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae035d33-fe", "ovs_interfaceid": "ae035d33-feaf-43d3-a5ed-93b396819be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.724693] env[68437]: DEBUG nova.network.neutron [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.846471] env[68437]: DEBUG oslo_vmware.api [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Task: {'id': task-2944546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101299} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.846646] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.846992] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.847350] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.847875] env[68437]: INFO nova.compute.manager [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Took 2.32 seconds to destroy the instance on the hypervisor. [ 997.848190] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.848537] env[68437]: DEBUG nova.compute.manager [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 997.848704] env[68437]: DEBUG nova.network.neutron [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 997.911933] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfe840b-bf29-4e57-a2fc-c085156e0827 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.926600] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944547, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.929997] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f64b64f-415f-4d11-a882-a649e9002470 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.982939] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e76e0f-27f9-403f-8df5-ca02a9fdcb41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.992529] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a146174c-7a1f-4eeb-b6a7-a64b61d24736 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.008964] env[68437]: DEBUG nova.compute.provider_tree [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.071149] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Successfully created port: 3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.077411] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 998.109482] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.186808] env[68437]: DEBUG oslo_concurrency.lockutils [req-98e76d52-c2b7-47f7-b646-d52abbc3e5ef req-c1bbf673-2b6c-4cd8-8b94-4b8e16a5d73f service nova] Releasing lock "refresh_cache-ae32443d-3b55-4bd7-8f07-e66d206ec1d1" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.228285] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Releasing lock "refresh_cache-7422ff70-901c-4343-9b9f-f12c52348d2c" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.228825] env[68437]: DEBUG nova.compute.manager [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.229147] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.230090] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6109eeca-ffe1-4177-87f9-49a00a824358 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.240746] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.241199] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bf48531-7970-47d4-b8f0-2b0fba5e2aa8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.254025] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 998.254025] env[68437]: value = "task-2944548" [ 998.254025] env[68437]: _type = "Task" [ 998.254025] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.263187] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944548, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.425758] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944547, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.518266] env[68437]: DEBUG nova.scheduler.client.report [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.764524] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944548, 'name': PowerOffVM_Task, 'duration_secs': 0.14142} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.764929] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.765143] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.765461] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b19d89b1-2141-4d43-a314-fdc82bb47df7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.806284] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.806284] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.806284] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Deleting the datastore file [datastore2] 7422ff70-901c-4343-9b9f-f12c52348d2c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.806998] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-624194a5-e17e-4875-b002-7cf1839b6789 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.818233] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for the task: (returnval){ [ 998.818233] env[68437]: value = "task-2944550" [ 998.818233] env[68437]: _type = "Task" [ 998.818233] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.830621] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944550, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.851395] env[68437]: DEBUG nova.compute.manager [req-097a1eb2-1795-4ddd-8794-54085a02b973 req-cc7b74a1-ced7-4bb6-9e05-bbb429899f13 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Received event network-vif-deleted-9edc8a0b-761d-4911-904e-9cb4a163bf7e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 998.851599] env[68437]: INFO nova.compute.manager [req-097a1eb2-1795-4ddd-8794-54085a02b973 req-cc7b74a1-ced7-4bb6-9e05-bbb429899f13 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Neutron deleted interface 9edc8a0b-761d-4911-904e-9cb4a163bf7e; detaching it from the instance and deleting it from the info cache [ 998.851846] env[68437]: DEBUG nova.network.neutron [req-097a1eb2-1795-4ddd-8794-54085a02b973 req-cc7b74a1-ced7-4bb6-9e05-bbb429899f13 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.925845] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944547, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.022137] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.962s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.024796] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.071s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.025196] env[68437]: DEBUG nova.objects.instance [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lazy-loading 'resources' on Instance uuid 2f368262-0825-4ccc-9b1e-523b705bcfce {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.057061] env[68437]: INFO nova.scheduler.client.report [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted allocations for instance d84c599e-29b2-45ec-a3f7-54ef85af9a3d [ 999.090567] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 999.116906] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 999.117372] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.117646] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 999.117955] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.118192] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 999.118468] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 999.118859] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 999.119108] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 999.119366] env[68437]: DEBUG nova.virt.hardware [None 
req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 999.119605] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 999.119863] env[68437]: DEBUG nova.virt.hardware [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 999.120545] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.122387] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b51335a-761d-4ed8-b76b-c3842b957a93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.126418] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b268e3f6-1c65-4fc5-b1a0-b7690042c9ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.138081] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb86b39-d4bb-4e29-9f15-f3b7c0915267 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.144029] env[68437]: DEBUG oslo_vmware.api [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 999.144029] env[68437]: value = "task-2944551" [ 999.144029] env[68437]: _type = "Task" [ 999.144029] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.168184] env[68437]: DEBUG oslo_vmware.api [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944551, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.326627] env[68437]: DEBUG oslo_vmware.api [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Task: {'id': task-2944550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156609} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.326901] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.328028] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.328028] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.328028] env[68437]: INFO nova.compute.manager [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 999.328028] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.328028] env[68437]: DEBUG nova.compute.manager [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.328028] env[68437]: DEBUG nova.network.neutron [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 999.334214] env[68437]: DEBUG nova.network.neutron [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.345392] env[68437]: DEBUG nova.network.neutron [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 999.355109] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5dd3ad8-f96b-462a-969a-b7587e2c296e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.366686] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc91c87-cdb7-48dd-8882-2c21a39494f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.403710] env[68437]: DEBUG nova.compute.manager [req-097a1eb2-1795-4ddd-8794-54085a02b973 req-cc7b74a1-ced7-4bb6-9e05-bbb429899f13 service nova] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Detach interface failed, port_id=9edc8a0b-761d-4911-904e-9cb4a163bf7e, reason: Instance 29e9555b-f928-43e7-a3a3-869ed07d7326 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 999.426847] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944547, 'name': CreateVM_Task, 'duration_secs': 1.554801} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.427026] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.427693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.427859] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.428208] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 999.428474] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-808eea0f-5053-4fe1-affc-64d33cf28999 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.434032] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 999.434032] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52737db1-09f3-ea45-ca9f-dcbad5680641" [ 999.434032] env[68437]: _type = "Task" [ 999.434032] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.443310] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52737db1-09f3-ea45-ca9f-dcbad5680641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.547819] env[68437]: DEBUG nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 999.562641] env[68437]: DEBUG oslo_concurrency.lockutils [None req-599b8ac4-f32f-49fd-b539-522d340c58f5 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "d84c599e-29b2-45ec-a3f7-54ef85af9a3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.415s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.568747] env[68437]: DEBUG nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 999.569057] env[68437]: DEBUG nova.compute.provider_tree [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 999.582894] env[68437]: DEBUG nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 999.604460] env[68437]: DEBUG nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Refreshing trait associations for resource provider 
422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 999.655688] env[68437]: DEBUG oslo_vmware.api [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944551, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.798666] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Successfully updated port: 3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.836682] env[68437]: INFO nova.compute.manager [-] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Took 1.99 seconds to deallocate network for instance. [ 999.847785] env[68437]: DEBUG nova.network.neutron [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.923413] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffaf7eb-0791-4a96-a052-032cf045b1e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.932134] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e84c1e-6106-4482-af2a-a5ddc561bde1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.944332] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52737db1-09f3-ea45-ca9f-dcbad5680641, 'name': SearchDatastore_Task, 'duration_secs': 0.016929} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.969511] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.969802] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.970057] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.970211] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.970391] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.971245] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47d569c6-42d6-49ba-8118-b74e63cc5248 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.973868] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928fb9d8-f676-4289-b623-c2a084954052 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.982707] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ef087e-14df-4b86-a839-ab2c73c26fc1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.987489] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.987683] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.988748] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92025095-e2cd-4288-841e-12e2abc212ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.001147] env[68437]: DEBUG nova.compute.provider_tree [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.006290] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1000.006290] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52900949-124d-37c6-b178-a752fed513ad" [ 1000.006290] env[68437]: _type = "Task" [ 1000.006290] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.015789] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52900949-124d-37c6-b178-a752fed513ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.155894] env[68437]: DEBUG oslo_vmware.api [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944551, 'name': PowerOnVM_Task, 'duration_secs': 0.552606} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.156217] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.156379] env[68437]: DEBUG nova.compute.manager [None req-f67445b7-6612-462a-9888-6d16a9c75a64 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.157189] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea8ba99-74a0-40ff-9793-fd343ee485f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.208044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.208044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.208044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.208613] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.208613] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.210605] env[68437]: INFO nova.compute.manager [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Terminating 
instance [ 1000.300010] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.300190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.300349] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1000.355568] env[68437]: INFO nova.compute.manager [-] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Took 1.03 seconds to deallocate network for instance. [ 1000.391377] env[68437]: INFO nova.compute.manager [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Took 0.55 seconds to detach 1 volumes for instance. [ 1000.393734] env[68437]: DEBUG nova.compute.manager [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Deleting volume: d01c66e3-87e9-40bc-95f1-9f03e1a6d5a7 {{(pid=68437) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1000.504539] env[68437]: DEBUG nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.519357] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52900949-124d-37c6-b178-a752fed513ad, 'name': SearchDatastore_Task, 'duration_secs': 0.032312} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.520281] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16a55f6b-8ea0-4a9c-bde5-091a56453539 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.527168] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1000.527168] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a9522b-c6c1-9c71-aaa7-42f461f5cda2" [ 1000.527168] env[68437]: _type = "Task" [ 1000.527168] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.537105] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a9522b-c6c1-9c71-aaa7-42f461f5cda2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.714845] env[68437]: DEBUG nova.compute.manager [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.715155] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.716184] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d29dde-2acf-4475-8024-7ddb125bdb4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.725064] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.725364] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60035654-f949-47f4-a336-84b5b0e3d748 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.733547] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 1000.733547] env[68437]: value = "task-2944553" [ 1000.733547] env[68437]: _type = "Task" [ 1000.733547] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.744916] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.837751] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1000.861338] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.879815] env[68437]: DEBUG nova.compute.manager [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Received event network-vif-plugged-3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1000.880054] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Acquiring lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.880269] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.880436] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.880605] env[68437]: DEBUG nova.compute.manager [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] No waiting events found dispatching network-vif-plugged-3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.880771] env[68437]: WARNING nova.compute.manager [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Received unexpected event network-vif-plugged-3d83c7a6-c022-4e23-aa36-301fde0fcb79 for instance with vm_state building and task_state spawning. 
[ 1000.880945] env[68437]: DEBUG nova.compute.manager [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Received event network-changed-3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1000.881120] env[68437]: DEBUG nova.compute.manager [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Refreshing instance network info cache due to event network-changed-3d83c7a6-c022-4e23-aa36-301fde0fcb79. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1000.881288] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Acquiring lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.935695] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.999988] env[68437]: DEBUG nova.network.neutron [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Updating instance_info_cache with network_info: [{"id": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "address": "fa:16:3e:91:c1:17", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d83c7a6-c0", "ovs_interfaceid": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.009944] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.012631] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.664s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.014252] env[68437]: INFO nova.compute.claims [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1001.039608] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a9522b-c6c1-9c71-aaa7-42f461f5cda2, 'name': SearchDatastore_Task, 'duration_secs': 0.016621} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.039909] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.040774] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ae32443d-3b55-4bd7-8f07-e66d206ec1d1/ae32443d-3b55-4bd7-8f07-e66d206ec1d1.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.040774] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89e13a34-290d-4941-9441-7e27a900d4f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.044943] env[68437]: INFO nova.scheduler.client.report [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Deleted allocations for instance 2f368262-0825-4ccc-9b1e-523b705bcfce [ 1001.050517] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1001.050517] env[68437]: value = "task-2944554" [ 1001.050517] env[68437]: _type = "Task" [ 1001.050517] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.060782] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944554, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.243786] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944553, 'name': PowerOffVM_Task, 'duration_secs': 0.311879} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.244208] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.244366] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.244665] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1af91b8-508d-4732-b2a4-90a0de866efd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.328814] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.329153] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.329400] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Deleting the datastore file [datastore2] 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.329702] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78578d1f-d4d9-495c-a7d9-370f2e3884b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.341036] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for the task: (returnval){ [ 1001.341036] env[68437]: value = "task-2944556" [ 1001.341036] env[68437]: _type = "Task" [ 1001.341036] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.350043] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.503689] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.503689] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Instance network_info: |[{"id": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "address": "fa:16:3e:91:c1:17", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d83c7a6-c0", "ovs_interfaceid": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1001.503994] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Acquired lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.504134] env[68437]: DEBUG nova.network.neutron [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Refreshing network info cache for port 3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1001.505688] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:c1:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d83c7a6-c022-4e23-aa36-301fde0fcb79', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.513776] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 
tempest-DeleteServersTestJSON-1806147129-project-member] Creating folder: Project (38ad479949b24307b08e16fdb821c76f). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.517427] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db9f5b19-b64b-49a3-b966-305405f77ee4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.539062] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created folder: Project (38ad479949b24307b08e16fdb821c76f) in parent group-v590848. [ 1001.539250] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating folder: Instances. Parent ref: group-v591053. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.539493] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16ffaf0d-00b2-48c1-ab2f-693a39f2225b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.557443] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2271cf84-b241-4d1b-9121-cfc3664a5d04 tempest-ServersAdminTestJSON-1809340153 tempest-ServersAdminTestJSON-1809340153-project-member] Lock "2f368262-0825-4ccc-9b1e-523b705bcfce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.160s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.562339] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created folder: Instances in parent group-v591053. [ 1001.562590] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.563479] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.564374] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eaaa92fc-8ff2-4201-b965-11ff27de9bd4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.586066] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944554, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.595342] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.595342] env[68437]: value = "task-2944559" [ 1001.595342] env[68437]: _type = "Task" [ 1001.595342] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.607358] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944559, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.770474] env[68437]: DEBUG nova.network.neutron [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Updated VIF entry in instance network info cache for port 3d83c7a6-c022-4e23-aa36-301fde0fcb79. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1001.770847] env[68437]: DEBUG nova.network.neutron [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Updating instance_info_cache with network_info: [{"id": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "address": "fa:16:3e:91:c1:17", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d83c7a6-c0", "ovs_interfaceid": "3d83c7a6-c022-4e23-aa36-301fde0fcb79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.851411] env[68437]: DEBUG oslo_vmware.api [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Task: {'id': task-2944556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350038} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.851736] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.851986] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.852269] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.852481] env[68437]: INFO nova.compute.manager [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1001.852763] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.852991] env[68437]: DEBUG nova.compute.manager [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.853131] env[68437]: DEBUG nova.network.neutron [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1002.065837] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705485} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.065837] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ae32443d-3b55-4bd7-8f07-e66d206ec1d1/ae32443d-3b55-4bd7-8f07-e66d206ec1d1.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1002.066028] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.066423] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01aaa82f-aa89-439f-828c-f7119fce8e94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.077305] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1002.077305] env[68437]: value = "task-2944560" [ 1002.077305] env[68437]: _type = "Task" [ 1002.077305] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.088507] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944560, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.108539] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944559, 'name': CreateVM_Task, 'duration_secs': 0.449449} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.108741] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.109470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.109624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.109956] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1002.110233] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-826ec92c-710b-44ae-ab92-a6bde32ee222 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.116737] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1002.116737] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bb8540-3f09-d5ca-2ec5-fd019f6f82f3" [ 1002.116737] env[68437]: _type = "Task" [ 1002.116737] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.131365] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bb8540-3f09-d5ca-2ec5-fd019f6f82f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.273642] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fc78d74-ce2d-4578-9b75-1f94dc1985fa req-d172aac6-cdb1-4182-bccd-8fc7d530bf4d service nova] Releasing lock "refresh_cache-05e07d7c-0161-463c-89f7-1bf28f680bde" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.311686] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64ee780-86a4-4a37-9e98-416834068c38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.322118] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986b3cd4-2813-48a5-aec8-8bda418d50fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.362672] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725f4136-e116-4ec5-a170-04e04e7e3cd8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.371441] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1323e2-9e77-4ab6-8989-c10071d4004e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.385898] env[68437]: DEBUG nova.compute.provider_tree [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.440672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.441431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.587756] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082482} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.587933] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.588991] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0f47ae-3d67-461e-a3ee-dd9e76b53d91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.614201] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] ae32443d-3b55-4bd7-8f07-e66d206ec1d1/ae32443d-3b55-4bd7-8f07-e66d206ec1d1.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.614886] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abb08442-28b8-44f6-8e69-81836853b5ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.640329] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bb8540-3f09-d5ca-2ec5-fd019f6f82f3, 'name': SearchDatastore_Task, 'duration_secs': 0.015804} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.641795] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.642875] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.643187] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.643284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.643535] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.643898] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1002.643898] env[68437]: value = "task-2944561" [ 1002.643898] env[68437]: _type = "Task" [ 1002.643898] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.644176] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5651b21d-7604-4ec7-a1b3-77144a8ed9c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.656429] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944561, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.658308] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.658453] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.659174] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e18df705-837f-4acd-bdfa-546644770fd5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.665161] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1002.665161] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525bf0a4-b449-25bd-a3de-9b07d99a2d41" [ 1002.665161] env[68437]: _type = "Task" [ 1002.665161] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.676687] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525bf0a4-b449-25bd-a3de-9b07d99a2d41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.890422] env[68437]: DEBUG nova.scheduler.client.report [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.921265] env[68437]: DEBUG nova.compute.manager [req-98bbd70f-0a20-4100-b944-014383146dcd req-494d5a14-aa11-4614-9d9f-e5ffd5d0fe9c service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Received event network-vif-deleted-8aea055c-08c3-4b2a-ba4e-4aa831098aff {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1002.921542] env[68437]: INFO nova.compute.manager [req-98bbd70f-0a20-4100-b944-014383146dcd req-494d5a14-aa11-4614-9d9f-e5ffd5d0fe9c service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Neutron deleted interface 8aea055c-08c3-4b2a-ba4e-4aa831098aff; detaching it from the instance and deleting it from the info cache [ 1002.921676] env[68437]: DEBUG nova.network.neutron [req-98bbd70f-0a20-4100-b944-014383146dcd req-494d5a14-aa11-4614-9d9f-e5ffd5d0fe9c service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.944997] env[68437]: DEBUG nova.compute.utils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.061180] env[68437]: DEBUG nova.network.neutron [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.157407] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944561, 'name': ReconfigVM_Task, 'duration_secs': 0.367762} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.157711] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Reconfigured VM instance instance-0000004b to attach disk [datastore1] ae32443d-3b55-4bd7-8f07-e66d206ec1d1/ae32443d-3b55-4bd7-8f07-e66d206ec1d1.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.159274] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16e9e010-8453-4269-8ff5-9a652657ddb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.167458] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1003.167458] env[68437]: value = "task-2944562" [ 1003.167458] env[68437]: _type = "Task" [ 1003.167458] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.184521] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525bf0a4-b449-25bd-a3de-9b07d99a2d41, 'name': SearchDatastore_Task, 'duration_secs': 0.018008} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.184521] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944562, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.185993] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d5a10e7-af19-4f8b-a2de-56cf186b43e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.192414] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1003.192414] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52771065-b3c9-508e-7dd5-8fb3c2e565ce" [ 1003.192414] env[68437]: _type = "Task" [ 1003.192414] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.203233] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52771065-b3c9-508e-7dd5-8fb3c2e565ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.402021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.402021] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1003.403309] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.474s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.424372] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee022c8e-6dd8-4b11-9188-1614aa3d3433 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.437149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999769c5-fe78-40b2-a83b-4a3991b9e61e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.449205] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.478282] env[68437]: DEBUG nova.compute.manager [req-98bbd70f-0a20-4100-b944-014383146dcd req-494d5a14-aa11-4614-9d9f-e5ffd5d0fe9c service nova] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Detach interface failed, port_id=8aea055c-08c3-4b2a-ba4e-4aa831098aff, reason: Instance 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1003.564628] env[68437]: INFO nova.compute.manager [-] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Took 1.71 seconds to deallocate network for instance. [ 1003.684159] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944562, 'name': Rename_Task, 'duration_secs': 0.198926} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.684523] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.684796] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7df05de5-c28c-4225-b4f9-604f1613e847 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.699187] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1003.699187] env[68437]: value = "task-2944563" [ 1003.699187] env[68437]: _type = "Task" [ 1003.699187] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.709872] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52771065-b3c9-508e-7dd5-8fb3c2e565ce, 'name': SearchDatastore_Task, 'duration_secs': 0.024774} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.711091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.712920] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 05e07d7c-0161-463c-89f7-1bf28f680bde/05e07d7c-0161-463c-89f7-1bf28f680bde.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.717680] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cb1e0e0-e0ec-44bc-8781-6d55c63a444f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.720560] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944563, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.729101] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1003.729101] env[68437]: value = "task-2944564" [ 1003.729101] env[68437]: _type = "Task" [ 1003.729101] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.739938] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944564, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.907537] env[68437]: DEBUG nova.compute.utils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1003.915430] env[68437]: INFO nova.compute.claims [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.927021] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1003.927679] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1004.070764] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.215022] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944563, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.241896] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944564, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.435683] env[68437]: INFO nova.compute.resource_tracker [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating resource usage from migration cc5d69af-8701-41e6-9eac-99856916861a [ 1004.536595] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.536867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.537114] env[68437]: INFO nova.compute.manager [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Attaching volume 4f284da8-83f9-4c81-9fc5-d241f2cbc05b to /dev/sdb [ 1004.583178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f597f5-5e41-4a18-9978-9ad3a1377345 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.590390] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a81dd2-5e13-4f3f-8aa5-a519efc9f012 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.614480] env[68437]: DEBUG nova.virt.block_device [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating existing volume attachment record: 92bdff21-6012-4873-b1c8-083837fea686 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1004.710838] env[68437]: DEBUG oslo_vmware.api [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944563, 'name': PowerOnVM_Task, 'duration_secs': 0.530007} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.713565] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1004.714051] env[68437]: INFO nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1004.714296] env[68437]: DEBUG nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.715328] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb31e16-4711-44e4-b702-e45e006089e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.740794] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944564, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685476} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.741121] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 05e07d7c-0161-463c-89f7-1bf28f680bde/05e07d7c-0161-463c-89f7-1bf28f680bde.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.741346] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.741607] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3aa7b8c-a806-42dc-be79-bdd596853ea4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.753272] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1004.753272] env[68437]: value = "task-2944565" [ 1004.753272] env[68437]: _type = "Task" [ 1004.753272] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.761689] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9653761-b969-44f7-ab92-97daa7093a3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.768128] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944565, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.777632] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518ee78e-ce84-4ee2-9191-5cd198c78b71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.817302] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7098665f-63a5-4082-8ab9-2e9b02804ed7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.826998] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff029e5b-a728-4713-abf3-b8564a209f50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.844013] env[68437]: DEBUG nova.compute.provider_tree [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.940795] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1004.973819] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.974109] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.974305] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.974506] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.974654] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.974803] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.975050] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.975261] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.975523] 
env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.975772] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.975981] env[68437]: DEBUG nova.virt.hardware [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.977022] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47b36a2-3508-434f-9bf0-53181afd1ec7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.985766] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785d3ade-72b9-4cbb-acf6-51335bd13fb9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.001636] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.008550] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Creating folder: Project (c784df42c8d7400c8aa0014ad5f1db52). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1005.008987] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92c61922-8189-4758-b60e-5ed9365bee38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.023892] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Created folder: Project (c784df42c8d7400c8aa0014ad5f1db52) in parent group-v590848. [ 1005.024282] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Creating folder: Instances. Parent ref: group-v591057. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1005.024725] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8eb6d1f1-c536-41af-82c8-30419444e63b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.038393] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Created folder: Instances in parent group-v591057. [ 1005.038691] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1005.038932] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1005.039180] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5640501a-6356-4a66-9fbe-36bec78d47af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.058018] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.058018] env[68437]: value = "task-2944570" [ 1005.058018] env[68437]: _type = "Task" [ 1005.058018] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.074067] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944570, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.239760] env[68437]: INFO nova.compute.manager [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Took 18.49 seconds to build instance. [ 1005.267771] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944565, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136691} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.268830] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.272365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5822860a-9ef7-4168-a2fb-e48bc6cfdeda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.299860] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 05e07d7c-0161-463c-89f7-1bf28f680bde/05e07d7c-0161-463c-89f7-1bf28f680bde.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.300384] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50f30f8c-6dd2-4387-85cb-0d31ece8ae3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.324762] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1005.324762] env[68437]: value = "task-2944572" [ 1005.324762] env[68437]: _type = "Task" [ 1005.324762] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.339867] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944572, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.347230] env[68437]: DEBUG nova.scheduler.client.report [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.571038] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944570, 'name': CreateVM_Task, 'duration_secs': 0.422602} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.571368] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1005.571633] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.571788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.572190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1005.572459] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b97a2f-21b4-4302-8fcb-bf6b3a75bbbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.580700] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1005.580700] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c814d9-6a38-d3f6-d6bf-18ee03a15845" [ 1005.580700] env[68437]: _type = "Task" [ 1005.580700] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.588841] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c814d9-6a38-d3f6-d6bf-18ee03a15845, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.742228] env[68437]: DEBUG oslo_concurrency.lockutils [None req-58e6bc1b-df89-4f10-9828-53baebef56ed tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.996s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.837887] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944572, 'name': ReconfigVM_Task, 'duration_secs': 0.508571} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.838016] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 05e07d7c-0161-463c-89f7-1bf28f680bde/05e07d7c-0161-463c-89f7-1bf28f680bde.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.839353] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72e87ed9-bc52-483d-b707-b2dc44d3657a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.848050] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1005.848050] env[68437]: value = "task-2944573" [ 1005.848050] env[68437]: _type = "Task" [ 1005.848050] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.852353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.449s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.852549] env[68437]: INFO nova.compute.manager [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Migrating [ 1005.864177] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.354s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.864470] env[68437]: DEBUG nova.objects.instance [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lazy-loading 'resources' on Instance uuid ada623a8-b0ce-4709-b2af-ad80b464af4e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.866429] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944573, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.091947] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c814d9-6a38-d3f6-d6bf-18ee03a15845, 'name': SearchDatastore_Task, 'duration_secs': 0.016534} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.092123] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.092521] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.093455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.093455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.093455] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1006.093616] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec8130e4-3178-4f75-9383-9c3794067c42 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.106803] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1006.107092] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1006.107856] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7779a7aa-4b00-46f9-90ce-ce2b54d6d9cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.114791] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1006.114791] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1d8-0350-eb8d-a823-3028010232ad" [ 1006.114791] env[68437]: _type = "Task" [ 1006.114791] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.124292] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1d8-0350-eb8d-a823-3028010232ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.361099] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944573, 'name': Rename_Task, 'duration_secs': 0.29641} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.361468] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.361727] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2845b3b6-4572-4395-9de4-6e182359e3f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.372021] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1006.372021] env[68437]: value = "task-2944574" [ 1006.372021] env[68437]: _type = "Task" [ 1006.372021] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.375128] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.375295] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.375760] env[68437]: DEBUG nova.network.neutron [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1006.383148] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944574, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.632028] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527ec1d8-0350-eb8d-a823-3028010232ad, 'name': SearchDatastore_Task, 'duration_secs': 0.02543} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.633124] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa87f397-1b9b-4d68-ada3-93197640372e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.642721] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1006.642721] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527b8ed4-946c-c806-e1d0-ec2302fe00e6" [ 1006.642721] env[68437]: _type = "Task" [ 1006.642721] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.656647] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527b8ed4-946c-c806-e1d0-ec2302fe00e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.678191] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa939463-aa9f-480c-8f4a-1288fec99450 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.686959] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71d0060-a302-4b40-a111-dbcad0205786 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.723192] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb645ba9-92f9-4757-99a7-3313e1032c0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.733110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e703bed-cf63-426d-8d3c-edf8e081ab8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.748761] env[68437]: DEBUG nova.compute.provider_tree [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.885127] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944574, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.117936] env[68437]: DEBUG nova.network.neutron [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.155835] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527b8ed4-946c-c806-e1d0-ec2302fe00e6, 'name': SearchDatastore_Task, 'duration_secs': 0.015568} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.156168] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.156516] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] e51356e4-7647-4678-bb4f-f069b5c7fef6/e51356e4-7647-4678-bb4f-f069b5c7fef6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1007.156847] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffc76763-48f3-4524-aa2e-9e4408326920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.165554] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1007.165554] env[68437]: value = "task-2944576" [ 1007.165554] env[68437]: _type = "Task" [ 1007.165554] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.176732] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944576, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.228335] env[68437]: DEBUG nova.compute.manager [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.229563] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef21211-455e-4922-bd9c-4f9fe5c1bc74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.251601] env[68437]: DEBUG nova.scheduler.client.report [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.384870] env[68437]: DEBUG oslo_vmware.api [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944574, 'name': PowerOnVM_Task, 'duration_secs': 0.635546} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.385453] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.385734] env[68437]: INFO nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 1007.385956] env[68437]: DEBUG nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.386886] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172fb40b-18b9-468b-80f7-a9a431ab318c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.621352] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.677985] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944576, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.742688] env[68437]: INFO nova.compute.manager [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] instance snapshotting [ 1007.747958] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0291fc76-1247-44e0-b006-934a4308eb0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.769128] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.905s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.771672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.687s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.771931] env[68437]: DEBUG nova.objects.instance [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lazy-loading 'resources' on Instance uuid 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.773893] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acad20a-e8ef-4889-b96d-b64849dc1cd1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.800045] env[68437]: INFO nova.scheduler.client.report [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 
tempest-ServersTestMultiNic-268970127-project-member] Deleted allocations for instance ada623a8-b0ce-4709-b2af-ad80b464af4e [ 1007.904276] env[68437]: INFO nova.compute.manager [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Took 19.53 seconds to build instance. [ 1007.950967] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1007.951913] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d7c6dc-f6a0-429f-8be8-c0564a2989e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.962092] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1007.962715] env[68437]: ERROR oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk due to incomplete transfer. [ 1007.962715] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-472099d1-4573-45e0-ada5-357d3eecf274 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.971704] env[68437]: DEBUG oslo_vmware.rw_handles [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ce3581-1768-e4fc-6f5c-59adaa935a3b/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1007.971915] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Uploaded image 0d9ddd6f-fb4b-4dd5-bcab-7a4edd9c7848 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1007.974282] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1007.974569] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2bc9dd66-2da8-4c54-aa42-04bde6d62b87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.984211] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 1007.984211] env[68437]: value = "task-2944577" [ 1007.984211] env[68437]: _type = "Task" [ 1007.984211] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.991791] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944577, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.178926] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944576, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602418} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.180257] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] e51356e4-7647-4678-bb4f-f069b5c7fef6/e51356e4-7647-4678-bb4f-f069b5c7fef6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1008.180449] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1008.180739] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86015018-99c8-4b71-928e-04c926f6a5f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.190305] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1008.190305] env[68437]: value = "task-2944578" [ 1008.190305] env[68437]: _type = "Task" [ 1008.190305] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.201459] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944578, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.292930] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1008.293402] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-921f8e28-d3b0-4025-885c-d5d514ca2a3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.310446] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1008.310446] env[68437]: value = "task-2944579" [ 1008.310446] env[68437]: _type = "Task" [ 1008.310446] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.310446] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2f54f79a-f2f5-47e1-a9f6-a60863633c07 tempest-ServersTestMultiNic-268970127 tempest-ServersTestMultiNic-268970127-project-member] Lock "ada623a8-b0ce-4709-b2af-ad80b464af4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.686s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.327045] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944579, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.406141] env[68437]: DEBUG oslo_concurrency.lockutils [None req-07a16b5a-9495-4319-97cb-8868a66a077a tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.045s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.494809] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944577, 'name': Destroy_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.607669] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d56ac4-2c1b-4e4d-991d-4387bce5c238 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.620915] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95b59ca-e0c4-4ad3-b755-14d5dc110d89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.660490] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1884fbf4-e81b-47d6-b7f8-117f2aea6b13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.666496] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.666800] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.674539] env[68437]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d36000-686e-479c-b468-e170ede53037 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.692469] env[68437]: DEBUG nova.compute.provider_tree [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.703751] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073983} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.704132] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.704987] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f827441-321a-4a1d-aa7c-d529d5cbfff6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.727546] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] e51356e4-7647-4678-bb4f-f069b5c7fef6/e51356e4-7647-4678-bb4f-f069b5c7fef6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.728126] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebc3ab82-c99e-4038-a48b-d36a16c7cbf9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.750035] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1008.750035] env[68437]: value = "task-2944580" [ 1008.750035] env[68437]: _type = "Task" [ 1008.750035] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.759335] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944580, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.823109] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944579, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.994864] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944577, 'name': Destroy_Task, 'duration_secs': 0.637979} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.995297] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Destroyed the VM [ 1008.995691] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1008.996102] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ed8fc67a-79a9-4fb0-b8ce-743efcde81e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.006207] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 1009.006207] env[68437]: value = "task-2944581" [ 1009.006207] env[68437]: _type = "Task" [ 1009.006207] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.015037] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944581, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.032701] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "05e07d7c-0161-463c-89f7-1bf28f680bde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.033161] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.033265] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.033385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.033556] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.035762] env[68437]: INFO nova.compute.manager [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Terminating instance [ 1009.169667] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1009.173165] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0373f42f-12a8-4868-b83e-8e300373b3c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.195923] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1009.199818] env[68437]: DEBUG nova.scheduler.client.report [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1009.261586] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944580, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.322523] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944579, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.522310] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944581, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.540577] env[68437]: DEBUG nova.compute.manager [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.540764] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.542009] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3059deef-0c86-49a6-aca1-400342f4bea5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.551902] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.552215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6233b8a-2adb-4ca2-a3cf-33f803de8e7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.561326] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1009.561326] env[68437]: value = "task-2944582" [ 1009.561326] env[68437]: _type = "Task" [ 1009.561326] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.571129] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944582, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.670923] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1009.671253] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591059', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'name': 'volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a01364f9-e30d-4140-ae41-1e7c4aaa2251', 'attached_at': '', 'detached_at': '', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'serial': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1009.672156] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f3a25a-562e-4fab-9b3a-0f9c44cc1ac9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.692737] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e54ca5-459a-4c2b-9868-6c80b4f0b78b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.714442] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.714828] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.724780] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b/volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.725883] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.727383] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c892ad69-f323-4da2-9b4d-144a0cb26a0d {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.728150] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.482s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.728331] env[68437]: DEBUG nova.objects.instance [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1009.730916] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df76608c-e25e-4156-b11b-f6c216a83a48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.749693] env[68437]: INFO nova.scheduler.client.report [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Deleted allocations for instance 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e [ 1009.758685] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1009.758685] env[68437]: value = "task-2944584" [ 1009.758685] env[68437]: _type = "Task" [ 1009.758685] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.758974] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1009.758974] env[68437]: value = "task-2944583" [ 1009.758974] env[68437]: _type = "Task" [ 1009.758974] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.770138] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944580, 'name': ReconfigVM_Task, 'duration_secs': 0.754358} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.772059] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Reconfigured VM instance instance-0000004d to attach disk [datastore2] e51356e4-7647-4678-bb4f-f069b5c7fef6/e51356e4-7647-4678-bb4f-f069b5c7fef6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.772216] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06299c5c-d4ff-48ee-964a-cb7e5a922736 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.778818] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.782669] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944584, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.788772] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1009.788772] env[68437]: value = "task-2944585" [ 1009.788772] env[68437]: _type = "Task" [ 1009.788772] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.803406] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944585, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.824409] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944579, 'name': CreateSnapshot_Task, 'duration_secs': 1.248296} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.826311] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1009.827171] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db094acb-037e-4514-afef-835417fbeb52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.021179] env[68437]: DEBUG oslo_vmware.api [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944581, 'name': RemoveSnapshot_Task, 'duration_secs': 0.660417} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.021817] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1010.022414] env[68437]: INFO nova.compute.manager [None req-0c607154-2ab1-4d32-a0dd-53747745cdba tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 17.51 seconds to snapshot the instance on the hypervisor. [ 1010.076390] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944582, 'name': PowerOffVM_Task, 'duration_secs': 0.310968} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.076717] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.076914] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.077227] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b65b34d-2cbf-45e4-9f44-c3a7812d064d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.168330] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.168473] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.169575] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore2] 05e07d7c-0161-463c-89f7-1bf28f680bde {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.169575] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-176fbc05-2042-459f-826b-fdd08b747d3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.179476] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1010.179476] env[68437]: value = "task-2944587" [ 1010.179476] env[68437]: _type = "Task" [ 1010.179476] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.194214] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944587, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.271294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8dc4747-d291-4f0d-8888-545412bd7c4f tempest-ListImageFiltersTestJSON-1775037781 tempest-ListImageFiltersTestJSON-1775037781-project-member] Lock "098010b8-b7f7-4bd1-a42c-7fc4dcaa666e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.597s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.279881] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944583, 'name': PowerOffVM_Task, 'duration_secs': 0.200007} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.283977] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.284508] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1010.292191] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944584, 'name': ReconfigVM_Task, 'duration_secs': 0.439449} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.292191] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfigured VM instance instance-0000002d to attach disk [datastore2] volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b/volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1010.303948] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fd26a9d-df88-4924-a0c9-045e18ab1f3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.323127] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944585, 'name': Rename_Task, 'duration_secs': 0.194343} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.325041] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1010.325135] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1010.325135] env[68437]: value = "task-2944588" [ 1010.325135] env[68437]: _type = "Task" [ 1010.325135] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.325317] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-555329aa-aaec-44d8-a7b7-803afc40413f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.339767] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944588, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.349408] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1010.349934] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1010.349934] env[68437]: value = "task-2944589" [ 1010.349934] env[68437]: _type = "Task" [ 1010.349934] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.350313] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9d9d09b5-0d64-4dff-8d75-0c6b7548e873 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.364932] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944589, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.366724] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1010.366724] env[68437]: value = "task-2944590" [ 1010.366724] env[68437]: _type = "Task" [ 1010.366724] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.377851] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944590, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.691123] env[68437]: DEBUG oslo_vmware.api [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139906} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.691419] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.691636] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.691781] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.691954] env[68437]: INFO nova.compute.manager [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1010.692213] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.692470] env[68437]: DEBUG nova.compute.manager [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1010.692528] env[68437]: DEBUG nova.network.neutron [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1010.751423] env[68437]: DEBUG oslo_concurrency.lockutils [None req-db753f3b-2ce5-4bc3-98db-ff8838b2e7c7 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.753118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.892s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.753351] env[68437]: DEBUG nova.objects.instance [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lazy-loading 'resources' on Instance uuid 7422ff70-901c-4343-9b9f-f12c52348d2c {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.793204] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1010.793463] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.793625] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1010.793808] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.793954] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1010.794116] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1010.794767] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1010.794767] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1010.794767] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1010.795230] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1010.795230] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1010.800629] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80915210-973f-4e4a-968b-54177915a13f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.820589] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1010.820589] env[68437]: value = "task-2944591" [ 1010.820589] env[68437]: _type = "Task" [ 1010.820589] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.831708] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944591, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.847033] env[68437]: DEBUG oslo_vmware.api [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944588, 'name': ReconfigVM_Task, 'duration_secs': 0.171984} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.847405] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591059', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'name': 'volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a01364f9-e30d-4140-ae41-1e7c4aaa2251', 'attached_at': '', 'detached_at': '', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'serial': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1010.865954] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944589, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.883650] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944590, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.215505] env[68437]: DEBUG nova.compute.manager [req-33650fb1-2d67-4599-b9fa-6a07c01166d4 req-9353e3d8-8ed9-4798-bc71-ff3f825ba5f4 service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Received event network-vif-deleted-3d83c7a6-c022-4e23-aa36-301fde0fcb79 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1011.215706] env[68437]: INFO nova.compute.manager [req-33650fb1-2d67-4599-b9fa-6a07c01166d4 req-9353e3d8-8ed9-4798-bc71-ff3f825ba5f4 service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Neutron deleted interface 3d83c7a6-c022-4e23-aa36-301fde0fcb79; detaching it from the instance and deleting it from the info cache [ 1011.216433] env[68437]: DEBUG nova.network.neutron [req-33650fb1-2d67-4599-b9fa-6a07c01166d4 req-9353e3d8-8ed9-4798-bc71-ff3f825ba5f4 service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.337714] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944591, 'name': ReconfigVM_Task, 'duration_secs': 0.299552} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.340851] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1011.364287] env[68437]: DEBUG oslo_vmware.api [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944589, 'name': PowerOnVM_Task, 'duration_secs': 0.751316} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.364595] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1011.364836] env[68437]: INFO nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Took 6.42 seconds to spawn the instance on the hypervisor. [ 1011.365038] env[68437]: DEBUG nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.366063] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8662b6-7cd2-4969-a6fb-30d11faa1a09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.387029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.387229] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.388568] env[68437]: DEBUG nova.objects.instance [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'flavor' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.388812] env[68437]: DEBUG 
oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944590, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.552037] env[68437]: DEBUG nova.network.neutron [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.583569] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f557c33-0b02-4a2f-a8b8-684c307d3421 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.591334] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4ba3c5-08e3-46fa-bb39-39570d84cea1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.628861] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b14921f-1205-4908-b3d9-9e2b81c09dfe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.637049] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9359ca16-8cbe-4cc9-84fc-c7cc95f84e1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.651621] env[68437]: DEBUG nova.compute.provider_tree [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.720268] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c040c230-6331-4968-8385-4ec0f77ee230 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.734960] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95576aad-9afd-4fb7-88dd-aded007a7ee4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.771340] env[68437]: DEBUG nova.compute.manager [req-33650fb1-2d67-4599-b9fa-6a07c01166d4 req-9353e3d8-8ed9-4798-bc71-ff3f825ba5f4 service nova] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Detach interface failed, port_id=3d83c7a6-c022-4e23-aa36-301fde0fcb79, reason: Instance 05e07d7c-0161-463c-89f7-1bf28f680bde could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1011.848111] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1011.848429] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1011.849292] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1011.849533] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1011.849896] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1011.849896] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1011.850183] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1011.850411] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1011.850605] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 
tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1011.850774] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1011.850949] env[68437]: DEBUG nova.virt.hardware [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1011.856626] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfiguring VM instance instance-0000004a to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1011.856946] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d00e4cce-97f5-4a9e-bc1f-accdd310fd1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.884755] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944590, 'name': CloneVM_Task, 'duration_secs': 1.510978} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.887041] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Created linked-clone VM from snapshot [ 1011.887139] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1011.887139] env[68437]: value = "task-2944592" [ 1011.887139] env[68437]: _type = "Task" [ 1011.887139] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.888292] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a052b78-748d-4755-94d1-ae4397d8a4cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.895423] env[68437]: DEBUG nova.objects.instance [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'pci_requests' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.905943] env[68437]: DEBUG nova.objects.instance [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'flavor' on Instance uuid a01364f9-e30d-4140-ae41-1e7c4aaa2251 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.908940] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Uploading image 5afa4611-8116-4794-96e6-c9bbdf81c4a5 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1011.917126] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944592, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.917629] env[68437]: INFO nova.compute.manager [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Took 19.58 seconds to build instance. [ 1011.933799] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1011.933799] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8695dc3b-1054-40a0-aa77-dc3e41a7f57e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.944423] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1011.944423] env[68437]: value = "task-2944593" [ 1011.944423] env[68437]: _type = "Task" [ 1011.944423] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.955443] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944593, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.056048] env[68437]: INFO nova.compute.manager [-] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Took 1.36 seconds to deallocate network for instance. [ 1012.157642] env[68437]: DEBUG nova.scheduler.client.report [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.400948] env[68437]: DEBUG nova.objects.base [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Object Instance<3f82b137-81d5-4754-b222-3cefce0b2a10> lazy-loaded attributes: flavor,pci_requests {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1012.400948] env[68437]: DEBUG nova.network.neutron [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1012.404655] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944592, 'name': ReconfigVM_Task, 'duration_secs': 0.299802} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.405797] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfigured VM instance instance-0000004a to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1012.406972] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e99fe11-1e11-498c-8ba3-7ac955a8e289 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.421918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-29b5e896-102e-4467-a6b6-f9a943c37923 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.885s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.441311] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2206452e-8df5-49bb-8b7e-2866d8a49738 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.113s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.454075] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.454075] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0121d75f-7e20-4315-941e-59ce7ad87e90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.479183] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944593, 'name': Destroy_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.480738] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1012.480738] env[68437]: value = "task-2944594" [ 1012.480738] env[68437]: _type = "Task" [ 1012.480738] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.492151] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944594, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.545847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ad1400e-9766-4127-92e7-2d5c3ef42ca4 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.158s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.564370] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.663471] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.666460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.731s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.667141] env[68437]: DEBUG nova.objects.instance [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lazy-loading 'resources' on Instance uuid 29e9555b-f928-43e7-a3a3-869ed07d7326 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.688027] env[68437]: INFO nova.scheduler.client.report [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Deleted allocations for instance 7422ff70-901c-4343-9b9f-f12c52348d2c [ 1012.918657] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.919056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.919214] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.919421] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.919622] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.926929] env[68437]: INFO nova.compute.manager [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Terminating instance [ 1012.969842] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944593, 'name': Destroy_Task, 'duration_secs': 0.631531} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.973025] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Destroyed the VM [ 1012.973025] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1012.973025] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d7e31d96-4728-432b-acd3-05fd157f5800 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.980575] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1012.980575] env[68437]: value = "task-2944595" [ 1012.980575] env[68437]: _type = "Task" [ 1012.980575] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.998180] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944594, 'name': ReconfigVM_Task, 'duration_secs': 0.517919} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.004857] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d/697d5011-fb4e-4542-851b-39953bbb293d.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1013.005274] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1013.013745] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944595, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.014057] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.014276] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.194902] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7ba28038-4313-4e17-a73d-176aa6065a09 tempest-ServerShowV257Test-713228456 tempest-ServerShowV257Test-713228456-project-member] Lock "7422ff70-901c-4343-9b9f-f12c52348d2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.672s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.429994] env[68437]: DEBUG nova.compute.manager [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1013.430264] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1013.434022] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa3a1f5-99b8-4544-bbc6-4185b1e5e86a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.441503] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1013.442063] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffb4b186-fc2b-46ee-9b22-029ff7c093af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.451307] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 1013.451307] env[68437]: value = "task-2944596" [ 1013.451307] env[68437]: _type = "Task" [ 1013.451307] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.471967] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.492997] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944595, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.494947] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19efa9c-d007-4ae1-85fa-0dc7bf489d8f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.507175] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047ace0f-a579-4401-92f3-bd01ea567b46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.546029] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab62781-1373-4a38-be6d-981d0136161a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.549454] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.556152] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf15745c-2159-4aad-a8a8-02645efb8798 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.590871] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed6cf5c-7f5f-4cfe-9279-d4c6abe371be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.595477] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3895995-f66e-464a-97fb-7b15cdadd8a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.612102] env[68437]: DEBUG nova.compute.provider_tree [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.628167] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1013.633136] env[68437]: DEBUG nova.scheduler.client.report [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.664595] env[68437]: INFO nova.compute.manager [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Rescuing [ 1013.664927] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.665098] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.665277] env[68437]: DEBUG nova.network.neutron [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1013.671671] env[68437]: DEBUG nova.compute.manager [None req-043f8301-713b-4cb5-a304-00dad3554032 tempest-ServerDiagnosticsV248Test-191375725 tempest-ServerDiagnosticsV248Test-191375725-project-admin] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.673137] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ae34b5-f5a3-4662-a109-2fef10ae5ceb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.685866] env[68437]: INFO nova.compute.manager [None req-043f8301-713b-4cb5-a304-00dad3554032 tempest-ServerDiagnosticsV248Test-191375725 tempest-ServerDiagnosticsV248Test-191375725-project-admin] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Retrieving diagnostics [ 1013.686745] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84aab78-6d4a-4b3e-b292-c6be3d4357d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.972367] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944596, 'name': PowerOffVM_Task, 'duration_secs': 0.278675} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.973305] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.973305] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.973546] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bdd860a-fdd9-4380-8771-a25cfdbf7987 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.993705] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944595, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.067812] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.068055] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.068246] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Deleting the datastore file [datastore1] 3a2dad52-63d3-46ec-ac43-3922bca3919e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.070706] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c730ea8a-7938-4060-b969-d4b9a63aa446 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.080718] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for the task: (returnval){ [ 1014.080718] env[68437]: value = "task-2944598" [ 1014.080718] env[68437]: _type = "Task" [ 1014.080718] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.090874] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.100911] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.139492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.473s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.146781] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.076s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.147169] env[68437]: DEBUG nova.objects.instance [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lazy-loading 'resources' on Instance uuid 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.165810] env[68437]: INFO nova.scheduler.client.report [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Deleted allocations for instance 29e9555b-f928-43e7-a3a3-869ed07d7326 [ 1014.261663] env[68437]: DEBUG nova.network.neutron [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Port b9c19590-2f8d-4149-989f-8d0fd1e5fe29 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1014.494764] env[68437]: DEBUG oslo_vmware.api [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944595, 'name': RemoveSnapshot_Task, 'duration_secs': 1.373596} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.496753] env[68437]: DEBUG nova.network.neutron [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.498149] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1014.595595] env[68437]: DEBUG oslo_vmware.api [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Task: {'id': task-2944598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437636} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.595767] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.595967] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.596070] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.596274] env[68437]: INFO nova.compute.manager [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1014.596454] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.596652] env[68437]: DEBUG nova.compute.manager [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1014.596749] env[68437]: DEBUG nova.network.neutron [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1014.682256] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf38badb-1156-4a76-82ed-88d8da80e788 tempest-ServersTestBootFromVolume-937193156 tempest-ServersTestBootFromVolume-937193156-project-member] Lock "29e9555b-f928-43e7-a3a3-869ed07d7326" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.663s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.726498] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.726773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.727085] env[68437]: DEBUG nova.objects.instance [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'flavor' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.947155] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7c71e2-75f0-4fa8-8c2c-69d8f6a3dd7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.960432] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8996227b-bc6c-4e18-9741-7f7ccd0de4c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.004954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.008565] env[68437]: WARNING nova.compute.manager [None req-14cb91b4-f4f2-40a0-beaf-62ee21bd0d06 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Image not found during snapshot: 
nova.exception.ImageNotFound: Image 5afa4611-8116-4794-96e6-c9bbdf81c4a5 could not be found. [ 1015.013575] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17aecb27-10bf-4103-9903-231c4f8436fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.024894] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb26181-7f43-432a-b60c-f0f383e3be2a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.046150] env[68437]: DEBUG nova.compute.provider_tree [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.244019] env[68437]: DEBUG nova.compute.manager [req-d9f7e2d3-461c-447d-87da-bb854e6c05e2 req-43531025-6e67-420a-a972-8b9a4ad4d3b2 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Received event network-vif-deleted-7be66ccd-47fb-4b51-ac58-a60ae0578274 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1015.244268] env[68437]: INFO nova.compute.manager [req-d9f7e2d3-461c-447d-87da-bb854e6c05e2 req-43531025-6e67-420a-a972-8b9a4ad4d3b2 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Neutron deleted interface 7be66ccd-47fb-4b51-ac58-a60ae0578274; detaching it from the instance and deleting it from the info cache [ 1015.244587] env[68437]: DEBUG nova.network.neutron [req-d9f7e2d3-461c-447d-87da-bb854e6c05e2 req-43531025-6e67-420a-a972-8b9a4ad4d3b2 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.294058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.294317] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.294465] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.336692] env[68437]: DEBUG nova.objects.instance [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a 
tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'pci_requests' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.352672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.352912] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.433497] env[68437]: DEBUG nova.network.neutron [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.550763] env[68437]: DEBUG nova.scheduler.client.report [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1015.748652] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6052b70-d2ed-42f1-a301-1b331b442b3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.760245] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab4c4c0-7c9e-44cc-8d0c-6717661d1a36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.793689] env[68437]: DEBUG nova.compute.manager [req-d9f7e2d3-461c-447d-87da-bb854e6c05e2 req-43531025-6e67-420a-a972-8b9a4ad4d3b2 service nova] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Detach interface failed, port_id=7be66ccd-47fb-4b51-ac58-a60ae0578274, reason: Instance 3a2dad52-63d3-46ec-ac43-3922bca3919e could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1015.820195] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.820530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.820749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.820934] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.821116] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.823114] env[68437]: INFO nova.compute.manager [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Terminating instance [ 1015.839580] env[68437]: DEBUG nova.objects.base [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Object Instance<3f82b137-81d5-4754-b222-3cefce0b2a10> lazy-loaded attributes: flavor,pci_requests {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1015.839798] env[68437]: DEBUG nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1015.855248] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] 
Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1015.881899] env[68437]: DEBUG nova.policy [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1015.936096] env[68437]: INFO nova.compute.manager [-] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Took 1.34 seconds to deallocate network for instance. [ 1016.057295] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.062487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.337s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.067447] env[68437]: INFO nova.compute.claims [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1016.090556] env[68437]: INFO nova.scheduler.client.report [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Deleted allocations for instance 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8 [ 1016.231788] env[68437]: DEBUG nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Successfully created port: 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.328121] env[68437]: DEBUG nova.compute.manager [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1016.328505] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1016.329831] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7f50e5-97fa-45b4-baa0-4256d2061c0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.340657] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.341172] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-979004f9-d209-4687-84bb-158a6659af77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.348346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.348346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.348346] env[68437]: DEBUG nova.network.neutron [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1016.351257] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1016.351257] env[68437]: value = "task-2944599" [ 1016.351257] env[68437]: _type = "Task" [ 1016.351257] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.365695] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944599, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.386462] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.445364] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.576672] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.577630] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6578329-cfda-4588-8588-a6601ddac266 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.590023] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1016.590023] env[68437]: value = "task-2944600" [ 1016.590023] env[68437]: _type = "Task" [ 1016.590023] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.602221] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.603038] env[68437]: DEBUG oslo_concurrency.lockutils [None req-64be859b-896c-4db6-a320-a90ae77108fe tempest-ServersTestJSON-426758050 tempest-ServersTestJSON-426758050-project-member] Lock "7ae346fa-fbb2-4fd7-b620-f0dda8243ca8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 16.395s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.865079] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944599, 'name': PowerOffVM_Task, 'duration_secs': 0.221436} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.865356] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.865519] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.865767] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4e714bd-0cb7-4149-995a-be778b647ff5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.937898] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.938138] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.938321] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleting the datastore file [datastore1] ae32443d-3b55-4bd7-8f07-e66d206ec1d1 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.938588] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f69b682e-77fd-4b67-9209-671c9a3d53cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.947052] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for the task: (returnval){ [ 1016.947052] env[68437]: value = "task-2944602" [ 1016.947052] env[68437]: _type = "Task" [ 1016.947052] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.958910] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944602, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.093823] env[68437]: DEBUG nova.network.neutron [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.102242] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944600, 'name': PowerOffVM_Task, 'duration_secs': 0.208966} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.107624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.111551] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37765bd5-c2b6-4da6-9b79-967a2755ca76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.143971] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee6c974-1a73-4b5f-964c-31cab3a402d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.189726] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.190055] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db35553f-1e28-4c09-a636-ad9d8eee84d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.200623] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1017.200623] env[68437]: value = "task-2944603" [ 1017.200623] env[68437]: _type = "Task" [ 1017.200623] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.213617] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944603, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.373579] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d7445c-44a0-4350-a30c-fabc3ac6faaa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.386335] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f11746-0351-4a8c-941b-677854ebff7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.418319] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fcff93-caa2-4135-8385-54ab3e2f74ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.427194] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107515c4-6ad4-4724-9742-d5b7b9e12393 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.444176] env[68437]: DEBUG nova.compute.provider_tree [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.455901] env[68437]: DEBUG oslo_vmware.api [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Task: {'id': task-2944602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170281} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.456914] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.457223] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.457333] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.457503] env[68437]: INFO nova.compute.manager [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Took 1.13 seconds to destroy the instance on the hypervisor. 
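The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) ends with oslo.vmware reporting each finished vCenter task on a single line of the form Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully. A minimal Python sketch for pulling task names and durations out of excerpts like this one; the regex and the helper name completed_tasks are illustrative assumptions, not part of Nova or oslo.vmware:

    import re

    # Matches the oslo.vmware completion lines seen above, e.g.
    #   Task: {'id': task-2944599, 'name': PowerOffVM_Task, 'duration_secs': 0.221436} completed successfully.
    TASK_DONE = re.compile(
        r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+), "
        r"'duration_secs': (?P<secs>[0-9.]+)\} completed successfully"
    )

    def completed_tasks(lines):
        """Yield (task_id, task_name, duration_secs) for each completed vCenter task."""
        for line in lines:
            match = TASK_DONE.search(line)
            if match:
                yield match.group('id'), match.group('name'), float(match.group('secs'))

    # Against the PowerOffVM_Task line above this would yield
    # ('task-2944599', 'PowerOffVM_Task', 0.221436).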
[ 1017.457742] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.458243] env[68437]: DEBUG nova.compute.manager [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1017.458341] env[68437]: DEBUG nova.network.neutron [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1017.597467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.713675] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1017.713919] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.714430] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.714631] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.714957] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.715428] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88b5ead4-234c-429f-9cae-4c7399f91a7e {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.729265] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.729517] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.730314] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7059859a-f42d-452e-9464-084f32bf8229 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.738770] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1017.738770] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ad044b-7300-8d6f-c902-e52898b4761c" [ 1017.738770] env[68437]: _type = "Task" [ 1017.738770] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.750204] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ad044b-7300-8d6f-c902-e52898b4761c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.891568] env[68437]: DEBUG nova.compute.manager [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-plugged-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1017.891568] env[68437]: DEBUG oslo_concurrency.lockutils [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.891749] env[68437]: DEBUG oslo_concurrency.lockutils [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.891959] env[68437]: DEBUG oslo_concurrency.lockutils [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.892598] env[68437]: DEBUG nova.compute.manager [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] No waiting events found dispatching network-vif-plugged-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1017.894230] env[68437]: WARNING nova.compute.manager [req-b42b5648-ae87-4f4c-9897-f96dfbd43141 req-20c3fd34-8da3-4086-b474-3befb620aa1f service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received unexpected event network-vif-plugged-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 for instance with vm_state active and task_state None. 
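The Acquiring lock / acquired ... waited 0.000s / "released" ... held 0.000s triplet above on the 3f82b137-...-events lock is the standard oslo.concurrency lockutils pattern that recurs throughout this log. A minimal sketch of the two lockutils forms visible here (decorator and context manager); the function names and bodies are illustrative assumptions, not Nova code:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on an in-process lock named
    # "compute_resources"; lockutils emits the "acquired ... waited Ns" and
    # "released ... held Ns" DEBUG lines seen in this log.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Work done here runs with the lock held, which is why concurrent
        # claims in the log report non-zero "waited" times.
        return instance_uuid

    # Context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" and "<uuid>-events" locks above.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance's network info cache here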
[ 1017.924946] env[68437]: DEBUG nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Successfully updated port: 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.946972] env[68437]: DEBUG nova.scheduler.client.report [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.125789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb72c56-9140-4ffa-aa54-faf61526164b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.149709] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca5a40f-8b84-4927-946a-02241bf4a203 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.157924] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1018.250457] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ad044b-7300-8d6f-c902-e52898b4761c, 'name': SearchDatastore_Task, 'duration_secs': 0.010588} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.251311] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acddb6bb-bb65-4c2d-849c-56d844dd41ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.258053] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1018.258053] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5226de74-8ac3-abba-58a3-8bac01a387c5" [ 1018.258053] env[68437]: _type = "Task" [ 1018.258053] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.266598] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5226de74-8ac3-abba-58a3-8bac01a387c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.340763] env[68437]: DEBUG nova.network.neutron [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.429538] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.430010] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.430221] env[68437]: DEBUG nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1018.452028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.452134] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1018.455355] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.891s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.455718] env[68437]: DEBUG nova.objects.instance [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'resources' on Instance uuid 05e07d7c-0161-463c-89f7-1bf28f680bde {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.666162] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.666162] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbcc00c0-3b89-41fc-9c64-ebf0bf242140 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.683658] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1018.683658] env[68437]: value = "task-2944604" [ 1018.683658] env[68437]: _type = "Task" [ 1018.683658] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.692659] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944604, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.769470] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5226de74-8ac3-abba-58a3-8bac01a387c5, 'name': SearchDatastore_Task, 'duration_secs': 0.011678} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.769758] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.770022] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. {{(pid=68437) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1018.770295] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a34e1509-7f2e-4e32-aac5-17064d91a2fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.779302] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1018.779302] env[68437]: value = "task-2944605" [ 1018.779302] env[68437]: _type = "Task" [ 1018.779302] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.790182] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944605, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.843677] env[68437]: INFO nova.compute.manager [-] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Took 1.39 seconds to deallocate network for instance. [ 1018.964490] env[68437]: DEBUG nova.compute.utils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1018.967683] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1018.967683] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1018.969925] env[68437]: WARNING nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] 6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88 already exists in list: networks containing: ['6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88']. ignoring it [ 1019.052934] env[68437]: DEBUG nova.policy [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7605d44a5b5448a3966872b4f524d13c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40d8becefc85431b9723c72aa09d152b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1019.321873] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944604, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.321873] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fa4ca9-62b4-4b7d-95e6-2d9fc0ee56fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.321873] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4628db52-5edf-4121-8901-bd94d77c2efd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.321873] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944605, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.335831] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66287008-efa7-4406-9200-178aa403d599 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.344522] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e3d4c4-14c5-4581-b41d-6d197a7db8e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.351860] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.363271] env[68437]: DEBUG nova.compute.provider_tree [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.473069] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1019.501217] env[68437]: DEBUG nova.network.neutron [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "address": "fa:16:3e:c9:14:75", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccc745c-15", "ovs_interfaceid": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.516351] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Successfully created port: e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.696988] env[68437]: DEBUG oslo_vmware.api [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944604, 'name': PowerOnVM_Task, 'duration_secs': 0.528872} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.697652] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.697982] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1497e852-5974-4c9f-bbfd-468d935cebb1 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance '697d5011-fb4e-4542-851b-39953bbb293d' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1019.794706] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944605, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558575} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.795027] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk. [ 1019.795952] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cea40d-0711-4848-901d-4d6cb40358a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.823584] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.824427] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d952575-7cb4-452a-91d1-d439aa0314fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.844513] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1019.844513] env[68437]: value = "task-2944606" [ 1019.844513] env[68437]: _type = "Task" [ 1019.844513] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.854031] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944606, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.866129] env[68437]: DEBUG nova.scheduler.client.report [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.922018] env[68437]: DEBUG nova.compute.manager [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Received event network-vif-deleted-ae035d33-feaf-43d3-a5ed-93b396819be3 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1019.922018] env[68437]: DEBUG nova.compute.manager [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-changed-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1019.922018] env[68437]: DEBUG nova.compute.manager [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing instance network info cache due to event network-changed-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1019.922284] env[68437]: DEBUG oslo_concurrency.lockutils [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.004635] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.005696] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.005937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.006348] env[68437]: DEBUG oslo_concurrency.lockutils [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.006567] env[68437]: DEBUG nova.network.neutron [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing network info cache for port 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1020.009401] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675cc00a-e62a-4d1c-8258-c053dcd7a445 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.042389] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.042974] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 
tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.043229] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.043528] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.043789] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.044007] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.044306] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.044546] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.048018] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.048018] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.048018] env[68437]: DEBUG nova.virt.hardware [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.051726] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfiguring VM to attach interface {{(pid=68437) 
attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1020.052614] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09b2f4ea-eed9-4687-86c3-66e519d52001 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.072911] env[68437]: DEBUG oslo_vmware.api [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1020.072911] env[68437]: value = "task-2944607" [ 1020.072911] env[68437]: _type = "Task" [ 1020.072911] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.082482] env[68437]: DEBUG oslo_vmware.api [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944607, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.355506] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944606, 'name': ReconfigVM_Task, 'duration_secs': 0.43861} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.356462] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfigured VM instance instance-0000002d to attach disk [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251/a272f526-6b8d-4a29-bd06-cd29ab5fabbe-rescue.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.356867] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be81410-b8f9-43a7-b5c3-bde2712ab6c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.383263] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.926s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.387955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.287s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.389984] env[68437]: INFO nova.compute.claims [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.393206] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70685176-3bab-400d-bc32-4c53f47333c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.410520] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1020.410520] env[68437]: value = "task-2944608" [ 1020.410520] env[68437]: _type = "Task" [ 1020.410520] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.420545] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.421719] env[68437]: INFO nova.scheduler.client.report [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 05e07d7c-0161-463c-89f7-1bf28f680bde [ 1020.484592] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1020.519523] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.519834] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.520088] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.520395] 
env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.520616] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.520912] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.521315] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.521557] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.521821] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.522132] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.522458] env[68437]: DEBUG nova.virt.hardware [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.523544] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beb75c0-5b8e-4367-82a9-ab009a3dc894 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.535684] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ed8eee-7748-4586-9f9e-bd5376d57876 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.588617] env[68437]: DEBUG oslo_vmware.api [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944607, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.750753] env[68437]: DEBUG nova.network.neutron [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updated VIF entry in instance network info cache for port 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1020.751272] env[68437]: DEBUG nova.network.neutron [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "address": "fa:16:3e:c9:14:75", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccc745c-15", "ovs_interfaceid": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.922784] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944608, 'name': ReconfigVM_Task, 
'duration_secs': 0.277066} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.923836] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.923836] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57eb0dfd-73e7-4378-86fc-e3a63a50434a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.929014] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1d2472c3-3f70-4fe0-b2ab-60d0da89236e tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "05e07d7c-0161-463c-89f7-1bf28f680bde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.896s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.931156] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1020.931156] env[68437]: value = "task-2944609" [ 1020.931156] env[68437]: _type = "Task" [ 1020.931156] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.942688] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.086588] env[68437]: DEBUG oslo_vmware.api [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944607, 'name': ReconfigVM_Task, 'duration_secs': 0.602321} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.086981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.087258] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfigured VM to attach interface {{(pid=68437) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1021.091112] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Successfully updated port: e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1021.254577] env[68437]: DEBUG oslo_concurrency.lockutils [req-9f9d0abe-000e-4cd6-a5d9-901bcc1837a5 req-c7e44244-cf3b-47d9-9c9b-9d94fc49adff service nova] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.453797] env[68437]: DEBUG oslo_vmware.api [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944609, 'name': PowerOnVM_Task, 'duration_secs': 0.42234} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.457394] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.464660] env[68437]: DEBUG nova.compute.manager [None req-b0d8f304-d76d-44f6-b175-32ed742307e5 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.464660] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8c9666-38f4-42cf-b5f7-69e7059f941a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.570997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.571314] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.571558] env[68437]: DEBUG nova.compute.manager [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Going to confirm migration 4 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1021.592420] env[68437]: DEBUG oslo_concurrency.lockutils [None req-82aaefec-15b1-4095-8e3b-915f0f893d3a tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.866s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.594031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.594031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.594281] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1021.675861] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd439871-ad5f-4a0d-ae05-cb648a2a542a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.684541] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edc3c02-f4a3-4a39-90c2-100bf9eac210 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.724448] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca711dee-edd5-4daa-aae8-fdfb68b82a2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.734252] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778c064b-4333-48b0-8e7a-411a2891f640 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.749434] env[68437]: DEBUG nova.compute.provider_tree [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.119344] env[68437]: DEBUG nova.compute.manager [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Received event network-vif-plugged-e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1022.119556] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.119756] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.120027] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.120088] env[68437]: DEBUG 
nova.compute.manager [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] No waiting events found dispatching network-vif-plugged-e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1022.120256] env[68437]: WARNING nova.compute.manager [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Received unexpected event network-vif-plugged-e6e03f32-de4c-4405-9930-c4a70c9d560d for instance with vm_state building and task_state spawning. [ 1022.120461] env[68437]: DEBUG nova.compute.manager [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Received event network-changed-e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1022.120554] env[68437]: DEBUG nova.compute.manager [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Refreshing instance network info cache due to event network-changed-e6e03f32-de4c-4405-9930-c4a70c9d560d. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1022.120721] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Acquiring lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.169406] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1022.215803] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.215941] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.216133] env[68437]: DEBUG nova.network.neutron [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1022.216343] env[68437]: DEBUG nova.objects.instance [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'info_cache' on Instance uuid 697d5011-fb4e-4542-851b-39953bbb293d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.256068] env[68437]: DEBUG nova.scheduler.client.report [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.330416] env[68437]: DEBUG nova.network.neutron [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Updating instance_info_cache with network_info: [{"id": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "address": "fa:16:3e:56:96:d3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tape6e03f32-de", "ovs_interfaceid": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.630251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.630594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.763186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.375s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.763186] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.766648] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.380s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.767957] env[68437]: INFO nova.compute.claims [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.836020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.836020] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance network_info: |[{"id": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "address": "fa:16:3e:56:96:d3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6e03f32-de", "ovs_interfaceid": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1022.836020] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Acquired lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.836020] env[68437]: DEBUG nova.network.neutron [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Refreshing network info cache for port e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1022.836020] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:96:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6e03f32-de4c-4405-9930-c4a70c9d560d', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.846380] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.847649] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1022.848117] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-952d5bb6-318b-4032-a0c0-550cecffcf24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.873554] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.873554] env[68437]: value = "task-2944610" [ 1022.873554] env[68437]: _type = "Task" [ 1022.873554] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.882417] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944610, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.133534] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.263540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.263803] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.278176] env[68437]: DEBUG nova.compute.utils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.279632] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1023.279870] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1023.357582] env[68437]: DEBUG nova.policy [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '779d2c27ec624bd2830b20608995cb2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68970ec925754b3faeba3b431241ce29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1023.383783] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944610, 'name': CreateVM_Task, 'duration_secs': 0.414211} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.386483] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1023.387104] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.387263] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.387578] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1023.388238] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c6391a6-6a9f-4097-817c-f0b555ecbfad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.395776] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1023.395776] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52139851-9885-ba06-c855-c4a3c923a5f3" [ 1023.395776] env[68437]: _type = "Task" [ 1023.395776] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.404943] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52139851-9885-ba06-c855-c4a3c923a5f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.407645] env[68437]: INFO nova.compute.manager [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Unrescuing [ 1023.407877] env[68437]: DEBUG oslo_concurrency.lockutils [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.408243] env[68437]: DEBUG oslo_concurrency.lockutils [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.408243] env[68437]: DEBUG nova.network.neutron [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1023.437179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-834e4e03-93e8-487e-bb7d-d4774e7092d7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.437443] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-834e4e03-93e8-487e-bb7d-d4774e7092d7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.437782] env[68437]: DEBUG nova.objects.instance [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'flavor' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.612687] env[68437]: DEBUG nova.network.neutron [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [{"id": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "address": "fa:16:3e:59:da:22", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c19590-2f", "ovs_interfaceid": "b9c19590-2f8d-4149-989f-8d0fd1e5fe29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.657586] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.733316] env[68437]: DEBUG nova.network.neutron [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Updated VIF entry in instance network info cache for port e6e03f32-de4c-4405-9930-c4a70c9d560d. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1023.733778] env[68437]: DEBUG nova.network.neutron [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Updating instance_info_cache with network_info: [{"id": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "address": "fa:16:3e:56:96:d3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6e03f32-de", "ovs_interfaceid": "e6e03f32-de4c-4405-9930-c4a70c9d560d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.767073] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.784305] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.865525] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Successfully created port: d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.916700] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52139851-9885-ba06-c855-c4a3c923a5f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010317} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.918880] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.918880] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.918880] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.918880] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.918880] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.921519] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8bc930-0bac-4906-a7bf-cfff49c4e862 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.932062] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.932273] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.935467] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25ba093c-355a-49e5-a34a-abfbc6ae9eb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.945161] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1023.945161] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e50967-7a99-4b84-bd6a-51653eac29f9" [ 1023.945161] env[68437]: _type = "Task" [ 1023.945161] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.957550] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e50967-7a99-4b84-bd6a-51653eac29f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010901} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.960056] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6260b84-1b12-4a90-957d-5170ef388937 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.965639] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1023.965639] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5204208e-a6e6-4bec-22f8-dd37ae53e65e" [ 1023.965639] env[68437]: _type = "Task" [ 1023.965639] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.981420] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5204208e-a6e6-4bec-22f8-dd37ae53e65e, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.981777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.982106] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.982436] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f0d809d-a72c-437f-9531-811f8eb1153d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.993763] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1023.993763] env[68437]: value = "task-2944611" [ 1023.993763] env[68437]: _type = "Task" [ 1023.993763] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.005495] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944611, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.098372] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50877249-7ed2-4dfe-bbfe-26e0ac4992b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.108813] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63f7a04-6f7c-4c90-84d4-1364431d8207 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.115890] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-697d5011-fb4e-4542-851b-39953bbb293d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.116210] env[68437]: DEBUG nova.objects.instance [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'migration_context' on Instance uuid 697d5011-fb4e-4542-851b-39953bbb293d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.153711] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6cae66-133d-4fcb-a53b-42b64c83b330 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.163961] env[68437]: DEBUG nova.objects.instance [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'pci_requests' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.166289] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd1bcf6-f4d3-490d-acb8-3aa3b62a091f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.189593] env[68437]: DEBUG nova.compute.provider_tree [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.234485] env[68437]: DEBUG nova.compute.manager [None req-ecf6b9a9-924a-4301-8f47-ef4f88019b51 tempest-ServerDiagnosticsV248Test-191375725 tempest-ServerDiagnosticsV248Test-191375725-project-admin] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.236315] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fa2475-2fd9-4ade-9c27-f2737d480eda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.241707] env[68437]: DEBUG oslo_concurrency.lockutils [req-97f235cf-a957-410d-8f3d-ed35e03fb9bc req-aec6c41f-3409-4c67-bc6b-3fda78ef042d service nova] Releasing lock "refresh_cache-f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.248518] env[68437]: INFO nova.compute.manager [None req-ecf6b9a9-924a-4301-8f47-ef4f88019b51 tempest-ServerDiagnosticsV248Test-191375725 tempest-ServerDiagnosticsV248Test-191375725-project-admin] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Retrieving diagnostics [ 1024.249613] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb2b270-d286-43bc-8a84-5b56f9b58595 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.323882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.506333] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944611, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475725} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.506645] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1024.506859] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1024.507144] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0c961b4-1247-40da-9100-6f69b87aeb7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.515110] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1024.515110] env[68437]: value = "task-2944612" [ 1024.515110] env[68437]: _type = "Task" [ 1024.515110] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.523656] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944612, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.599700] env[68437]: DEBUG nova.network.neutron [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.651484] env[68437]: DEBUG nova.objects.base [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Object Instance<697d5011-fb4e-4542-851b-39953bbb293d> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1024.652426] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8d6dc6-2671-4735-be70-e3f1e7869a5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.672262] env[68437]: DEBUG nova.objects.base [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Object Instance<3f82b137-81d5-4754-b222-3cefce0b2a10> lazy-loaded attributes: flavor,pci_requests {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1024.672615] env[68437]: DEBUG nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1024.675221] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-759b1133-7d85-4f86-af78-ecb9de563c80 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.682786] env[68437]: DEBUG oslo_vmware.api [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 
tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1024.682786] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52561a2f-88ac-9ba7-0c47-29527bfcf0da" [ 1024.682786] env[68437]: _type = "Task" [ 1024.682786] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.691573] env[68437]: DEBUG oslo_vmware.api [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52561a2f-88ac-9ba7-0c47-29527bfcf0da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.693543] env[68437]: DEBUG nova.scheduler.client.report [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.740037] env[68437]: DEBUG nova.policy [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1024.800188] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.827941] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.828214] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.828376] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.829023] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.829023] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.829023] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.829201] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.829201] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.829388] env[68437]: DEBUG 
nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.829552] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.829737] env[68437]: DEBUG nova.virt.hardware [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.830981] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90c331d-dc72-448b-b85d-cc4f10009e88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.839112] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf369d69-c925-41bc-a96e-a7974a50219c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.027904] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944612, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.305443} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.028224] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1025.028996] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d9f932-c46f-475c-99f8-f76df08b6814 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.051254] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1025.051522] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df630601-c775-40cb-8645-255cc4570971 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.074947] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1025.074947] env[68437]: value = "task-2944613" [ 1025.074947] env[68437]: _type = "Task" [ 1025.074947] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.083972] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944613, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.102785] env[68437]: DEBUG oslo_concurrency.lockutils [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.103503] env[68437]: DEBUG nova.objects.instance [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'flavor' on Instance uuid a01364f9-e30d-4140-ae41-1e7c4aaa2251 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.194797] env[68437]: DEBUG oslo_vmware.api [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52561a2f-88ac-9ba7-0c47-29527bfcf0da, 'name': SearchDatastore_Task, 'duration_secs': 0.008918} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.195172] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.198023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.199026] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1025.200951] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.756s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.201164] env[68437]: DEBUG nova.objects.instance [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lazy-loading 'resources' on Instance uuid 3a2dad52-63d3-46ec-ac43-3922bca3919e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.505754] env[68437]: DEBUG nova.compute.manager [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Received event network-vif-plugged-d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1025.506124] env[68437]: DEBUG oslo_concurrency.lockutils [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] Acquiring lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.506434] env[68437]: DEBUG oslo_concurrency.lockutils [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.506713] env[68437]: DEBUG oslo_concurrency.lockutils [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.507000] env[68437]: DEBUG nova.compute.manager [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] No waiting events found dispatching network-vif-plugged-d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1025.507299] env[68437]: WARNING nova.compute.manager [req-22853f71-4bb5-496c-a564-b3e0f4b30887 req-9b9bde8a-8828-46d5-b4ed-98b32d8ec13f service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Received unexpected event network-vif-plugged-d21ad3db-ccd9-4d63-9eb0-4620abdab063 for instance with vm_state building and task_state spawning. [ 1025.572736] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Successfully updated port: d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.576616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.576715] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.577522] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "e51356e4-7647-4678-bb4f-f069b5c7fef6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.577522] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.577522] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.583185] env[68437]: INFO nova.compute.manager [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Terminating instance [ 1025.591922] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944613, 'name': ReconfigVM_Task, 'duration_secs': 0.275573} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.592638] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Reconfigured VM instance instance-0000004e to attach disk [datastore2] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1025.593276] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cbaceb9-2779-4c02-bb28-34aea7a5c327 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.600900] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1025.600900] env[68437]: value = "task-2944614" [ 1025.600900] env[68437]: _type = "Task" [ 1025.600900] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.610233] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944614, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.611195] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e7f196-b86b-4d75-8000-47568ce47272 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.634671] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.635048] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-803f2430-8b72-4741-ac35-75750f32f7e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.642684] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1025.642684] env[68437]: value = "task-2944615" [ 1025.642684] env[68437]: _type = "Task" [ 1025.642684] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.651063] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.704377] env[68437]: DEBUG nova.compute.utils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.709816] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.709816] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1025.752402] env[68437]: DEBUG nova.policy [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4cb969f8ea340738201313bb0018918', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '440e76accc0f4841844dafa8075cfc20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.936856] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8d64c0-54ba-4286-a681-4b70fafbd05a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.945878] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b650007-6e18-43db-b80a-112be590a386 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.982205] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8b651a-31e7-48bf-9087-f768f659e888 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.993352] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783dfa43-f446-4621-9db2-ceff6fe03cdd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.007942] env[68437]: DEBUG nova.compute.provider_tree [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.075670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.075670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquired lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.075670] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd 
tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1026.082262] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Successfully created port: 797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.087794] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "refresh_cache-e51356e4-7647-4678-bb4f-f069b5c7fef6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.088029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquired lock "refresh_cache-e51356e4-7647-4678-bb4f-f069b5c7fef6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.088243] env[68437]: DEBUG nova.network.neutron [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1026.112106] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944614, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.154065] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944615, 'name': PowerOffVM_Task, 'duration_secs': 0.246768} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.154065] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.159036] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfiguring VM instance instance-0000002d to detach disk 2002 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1026.159431] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc52a909-39a5-4b6f-b3a6-ef65ea1212c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.179581] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1026.179581] env[68437]: value = "task-2944616" [ 1026.179581] env[68437]: _type = "Task" [ 1026.179581] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.189817] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944616, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.209768] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1026.416209] env[68437]: DEBUG nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Successfully updated port: 834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.511345] env[68437]: DEBUG nova.scheduler.client.report [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.611609] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944614, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.615101] env[68437]: DEBUG nova.network.neutron [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1026.627624] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1026.693564] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944616, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.696225] env[68437]: DEBUG nova.network.neutron [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.783120] env[68437]: DEBUG nova.network.neutron [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updating instance_info_cache with network_info: [{"id": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "address": "fa:16:3e:2e:04:04", "network": {"id": "12d67069-fd07-4979-9476-3dc1ea37cd07", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1182471126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68970ec925754b3faeba3b431241ce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd21ad3db-cc", "ovs_interfaceid": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.919782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.919782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.919782] env[68437]: DEBUG nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1027.016898] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.815s {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.020052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.667s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.020052] env[68437]: DEBUG nova.objects.instance [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lazy-loading 'resources' on Instance uuid ae32443d-3b55-4bd7-8f07-e66d206ec1d1 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.042603] env[68437]: INFO nova.scheduler.client.report [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Deleted allocations for instance 3a2dad52-63d3-46ec-ac43-3922bca3919e [ 1027.112395] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944614, 'name': Rename_Task, 'duration_secs': 1.156681} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.112687] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.112938] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58918885-f3ca-4c62-b7ae-dec310c3c3b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.119908] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1027.119908] env[68437]: value = "task-2944617" [ 1027.119908] env[68437]: _type = "Task" [ 1027.119908] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.127907] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.191225] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944616, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.198872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Releasing lock "refresh_cache-e51356e4-7647-4678-bb4f-f069b5c7fef6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.199352] env[68437]: DEBUG nova.compute.manager [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.199602] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.200435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d33e918-84a9-44ba-bc72-05beac7a175d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.208233] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.208481] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebbc9c7a-51c0-4f1f-a7c1-4c67a79b87f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.216287] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1027.216287] env[68437]: value = "task-2944618" [ 1027.216287] env[68437]: _type = "Task" [ 1027.216287] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.221793] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1027.227058] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944618, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.247941] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1027.248216] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1027.248382] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1027.248591] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1027.248755] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1027.248902] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1027.249128] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1027.249293] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1027.249459] env[68437]: DEBUG 
nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1027.249617] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1027.249789] env[68437]: DEBUG nova.virt.hardware [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1027.250760] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b47e073c-ee6a-42ab-9c07-f63f4e6b200d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.259226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a053f11-51e1-46f6-ae53-800ada74520c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.285353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Releasing lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.285712] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Instance network_info: |[{"id": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "address": "fa:16:3e:2e:04:04", "network": {"id": "12d67069-fd07-4979-9476-3dc1ea37cd07", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1182471126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68970ec925754b3faeba3b431241ce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd21ad3db-cc", "ovs_interfaceid": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1027.286127] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:04:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd21ad3db-ccd9-4d63-9eb0-4620abdab063', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.293538] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Creating folder: Project (68970ec925754b3faeba3b431241ce29). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.293786] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ced7dae8-a905-498e-aabe-9fbed5315788 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.308024] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Created folder: Project (68970ec925754b3faeba3b431241ce29) in parent group-v590848. [ 1027.308268] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Creating folder: Instances. Parent ref: group-v591064. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1027.309118] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-568661dc-2aa9-4d94-9969-a6604337e2bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.321343] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Created folder: Instances in parent group-v591064. [ 1027.321672] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.321878] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.322121] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59a72421-dbaa-4a90-ae9d-3a588ef26303 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.345824] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.345824] env[68437]: value = "task-2944621" [ 1027.345824] env[68437]: _type = "Task" [ 1027.345824] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.355074] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944621, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.462797] env[68437]: WARNING nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] 6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88 already exists in list: networks containing: ['6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88']. ignoring it [ 1027.463177] env[68437]: WARNING nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] 6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88 already exists in list: networks containing: ['6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88']. ignoring it [ 1027.540727] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Received event network-changed-d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1027.540787] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Refreshing instance network info cache due to event network-changed-d21ad3db-ccd9-4d63-9eb0-4620abdab063. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1027.540985] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Acquiring lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.541161] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Acquired lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.541311] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Refreshing network info cache for port d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1027.551951] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1ba1dce0-f86f-49bd-b2ac-25dbacb6cf44 tempest-ImagesOneServerTestJSON-55035735 tempest-ImagesOneServerTestJSON-55035735-project-member] Lock "3a2dad52-63d3-46ec-ac43-3922bca3919e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.632s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.632078] env[68437]: DEBUG oslo_vmware.api [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': 
task-2944617, 'name': PowerOnVM_Task, 'duration_secs': 0.510923} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.634923] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1027.635173] env[68437]: INFO nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Took 7.15 seconds to spawn the instance on the hypervisor. [ 1027.635451] env[68437]: DEBUG nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1027.636296] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f205fb7f-bac8-421f-8dd3-8ad81e5a95aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.691656] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944616, 'name': ReconfigVM_Task, 'duration_secs': 1.311929} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.694591] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfigured VM instance instance-0000002d to detach disk 2002 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1027.694799] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.697376] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-477ead98-8bc8-4080-8b3b-aa2116ab3b3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.706036] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1027.706036] env[68437]: value = "task-2944622" [ 1027.706036] env[68437]: _type = "Task" [ 1027.706036] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.718217] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.730208] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944618, 'name': PowerOffVM_Task, 'duration_secs': 0.266167} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.730485] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.731617] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.731617] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1289c19b-9641-4786-933d-ef64ef321130 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.761637] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.762179] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.762504] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Deleting the datastore file [datastore2] e51356e4-7647-4678-bb4f-f069b5c7fef6 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.765833] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1447395b-5215-4863-b3e1-0b74f5af4bde {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.774528] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for the task: (returnval){ [ 1027.774528] env[68437]: value = 
"task-2944624" [ 1027.774528] env[68437]: _type = "Task" [ 1027.774528] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.791642] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944624, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.797432] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11b4f5b-7c37-465f-ba40-11a5e9245aa0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.806485] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258b414d-bca4-4877-8212-c9f1606710b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.842049] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1d0d7c-b247-475b-b1a0-cb421b985868 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.854844] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc4f738-a5f8-4672-81f8-edbf99bd3bb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.861855] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944621, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.872517] env[68437]: DEBUG nova.compute.provider_tree [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.933155] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Successfully updated port: 797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1028.159906] env[68437]: DEBUG nova.network.neutron [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "address": "fa:16:3e:c9:14:75", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccc745c-15", "ovs_interfaceid": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}, {"id": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "address": "fa:16:3e:bb:a2:71", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap834e4e03-93", "ovs_interfaceid": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.161119] env[68437]: INFO nova.compute.manager [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Took 18.48 seconds to build instance. [ 1028.224288] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944622, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.285484] env[68437]: DEBUG oslo_vmware.api [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Task: {'id': task-2944624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.423336} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.285878] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.286029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.286209] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.286388] env[68437]: INFO nova.compute.manager [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1028.286803] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1028.287013] env[68437]: DEBUG nova.compute.manager [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1028.287177] env[68437]: DEBUG nova.network.neutron [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1028.300574] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updated VIF entry in instance network info cache for port d21ad3db-ccd9-4d63-9eb0-4620abdab063. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1028.300903] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updating instance_info_cache with network_info: [{"id": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "address": "fa:16:3e:2e:04:04", "network": {"id": "12d67069-fd07-4979-9476-3dc1ea37cd07", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1182471126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68970ec925754b3faeba3b431241ce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd21ad3db-cc", "ovs_interfaceid": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.306861] env[68437]: DEBUG nova.network.neutron [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1028.357101] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944621, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.376266] env[68437]: DEBUG nova.scheduler.client.report [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.435355] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.435512] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquired lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.435661] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1028.662295] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.662974] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.663247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.663661] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ed448e53-2c93-4fcc-95bf-7a5d33647ba3 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.997s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.664598] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99c9f51-da74-48b8-bd4d-3406063cb06b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.684242] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1028.684400] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.684594] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1028.684717] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.685544] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1028.685544] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1028.685544] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1028.685544] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1028.685716] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1028.685786] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1028.685938] env[68437]: DEBUG nova.virt.hardware [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1028.692258] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfiguring VM to attach interface {{(pid=68437) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1028.692602] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-854a40aa-2245-410e-9caf-b01f984246b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.714090] env[68437]: DEBUG oslo_vmware.api [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1028.714090] env[68437]: value = "task-2944625" [ 1028.714090] env[68437]: _type = "Task" [ 1028.714090] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.729331] env[68437]: DEBUG oslo_vmware.api [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944625, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.733720] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944622, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.803494] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Releasing lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.803868] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-plugged-834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1028.803981] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.804213] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.804377] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.804638] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] No waiting events found dispatching network-vif-plugged-834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1028.804732] env[68437]: WARNING nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received unexpected event network-vif-plugged-834e4e03-93e8-487e-bb7d-d4774e7092d7 for instance with vm_state active and task_state None. [ 1028.804867] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-changed-834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1028.805031] env[68437]: DEBUG nova.compute.manager [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing instance network info cache due to event network-changed-834e4e03-93e8-487e-bb7d-d4774e7092d7. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1028.805286] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.805437] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.805598] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Refreshing network info cache for port 834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1028.808687] env[68437]: DEBUG nova.network.neutron [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.856915] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944621, 'name': CreateVM_Task, 'duration_secs': 1.428739} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.857462] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1028.858204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.858397] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.858753] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1028.859426] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-247a5f97-04c0-482c-b787-59985a26bd6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.864610] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the 
task: (returnval){ [ 1028.864610] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fffd97-d1db-2967-b657-2e7318605b81" [ 1028.864610] env[68437]: _type = "Task" [ 1028.864610] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.873593] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fffd97-d1db-2967-b657-2e7318605b81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.881600] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.863s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.884086] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.227s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.885920] env[68437]: INFO nova.compute.claims [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.915970] env[68437]: INFO nova.scheduler.client.report [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Deleted allocations for instance ae32443d-3b55-4bd7-8f07-e66d206ec1d1 [ 1028.979921] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1029.095543] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.095543] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.095543] env[68437]: DEBUG nova.compute.manager [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.096919] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38ae5bd-4c4f-4ae3-bdc4-f964499ee20f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.105312] env[68437]: DEBUG nova.compute.manager [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1029.105904] env[68437]: DEBUG nova.objects.instance [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'flavor' on Instance uuid f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.148490] env[68437]: DEBUG nova.network.neutron [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Updating instance_info_cache with network_info: [{"id": "797183d0-327b-4e58-9355-0ba9d8beecdc", "address": "fa:16:3e:5c:ab:3d", "network": {"id": "892a1fe8-810d-4830-98d0-894fc3262705", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-231395990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "440e76accc0f4841844dafa8075cfc20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", 
"external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap797183d0-32", "ovs_interfaceid": "797183d0-327b-4e58-9355-0ba9d8beecdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.220953] env[68437]: DEBUG oslo_vmware.api [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944622, 'name': PowerOnVM_Task, 'duration_secs': 1.351604} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.221579] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.221811] env[68437]: DEBUG nova.compute.manager [None req-609dea77-717f-4d73-89fb-265e048d1f71 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.222566] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d32ecea-1220-4914-93e7-42e55271a2eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.228020] env[68437]: DEBUG oslo_vmware.api [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.311202] env[68437]: INFO nova.compute.manager [-] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Took 1.02 seconds to deallocate network for instance. [ 1029.376772] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fffd97-d1db-2967-b657-2e7318605b81, 'name': SearchDatastore_Task, 'duration_secs': 0.012382} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.377107] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.377332] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.377566] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.377703] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.377877] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.378169] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63587089-7d99-40a7-8b3f-36bc95b9b953 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.388606] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.388793] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.392147] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cefb1b5-52a1-4a3f-a212-d4e1c90ed94a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.399040] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1029.399040] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52af5e87-19bb-24e0-29b0-d04e8697f395" [ 1029.399040] env[68437]: _type = "Task" [ 1029.399040] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.410666] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52af5e87-19bb-24e0-29b0-d04e8697f395, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.427423] env[68437]: DEBUG oslo_concurrency.lockutils [None req-455dd709-8ee2-46fe-a301-be56a2c04765 tempest-ImagesTestJSON-413382143 tempest-ImagesTestJSON-413382143-project-member] Lock "ae32443d-3b55-4bd7-8f07-e66d206ec1d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.607s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.559664] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updated VIF entry in instance network info cache for port 834e4e03-93e8-487e-bb7d-d4774e7092d7. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1029.560235] env[68437]: DEBUG nova.network.neutron [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "address": "fa:16:3e:c9:14:75", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ccc745c-15", "ovs_interfaceid": "6ccc745c-15f0-4593-b5f9-a8bab6edf0d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "address": "fa:16:3e:bb:a2:71", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap834e4e03-93", "ovs_interfaceid": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.651179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Releasing lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.652056] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Instance network_info: |[{"id": "797183d0-327b-4e58-9355-0ba9d8beecdc", "address": "fa:16:3e:5c:ab:3d", "network": {"id": "892a1fe8-810d-4830-98d0-894fc3262705", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-231395990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "440e76accc0f4841844dafa8075cfc20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap797183d0-32", "ovs_interfaceid": "797183d0-327b-4e58-9355-0ba9d8beecdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.652056] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:ab:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e51ebca-e0f8-4b77-b155-4ff928eef130', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '797183d0-327b-4e58-9355-0ba9d8beecdc', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.659301] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Creating folder: Project (440e76accc0f4841844dafa8075cfc20). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.659883] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d3c0141-c42d-4094-ad2f-1ba58e7df924 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.674038] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Created folder: Project (440e76accc0f4841844dafa8075cfc20) in parent group-v590848. [ 1029.674038] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Creating folder: Instances. Parent ref: group-v591067. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.674038] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fc983ca-7d8e-4180-9727-46d4be52f1a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.685843] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.686149] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Created folder: Instances in parent group-v591067. [ 1029.686397] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.686579] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.688433] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.688789] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e47d68b-22b0-4742-af82-76360d252fa9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.707290] env[68437]: DEBUG nova.compute.manager [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Received event network-vif-plugged-797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1029.707485] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Acquiring lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.707770] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.707840] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.707997] env[68437]: DEBUG nova.compute.manager [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] No waiting events found dispatching network-vif-plugged-797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1029.708186] env[68437]: WARNING nova.compute.manager [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Received unexpected event network-vif-plugged-797183d0-327b-4e58-9355-0ba9d8beecdc for instance with vm_state building and task_state spawning. 
[ 1029.709033] env[68437]: DEBUG nova.compute.manager [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Received event network-changed-797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1029.709033] env[68437]: DEBUG nova.compute.manager [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Refreshing instance network info cache due to event network-changed-797183d0-327b-4e58-9355-0ba9d8beecdc. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1029.709033] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Acquiring lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.709033] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Acquired lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.709033] env[68437]: DEBUG nova.network.neutron [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Refreshing network info cache for port 797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1029.721867] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.721867] env[68437]: value = "task-2944628" [ 1029.721867] env[68437]: _type = "Task" [ 1029.721867] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.729929] env[68437]: DEBUG oslo_vmware.api [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944625, 'name': ReconfigVM_Task, 'duration_secs': 0.643711} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.731125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.731344] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfigured VM to attach interface {{(pid=68437) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1029.739749] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944628, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.818336] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.910056] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52af5e87-19bb-24e0-29b0-d04e8697f395, 'name': SearchDatastore_Task, 'duration_secs': 0.038743} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.912695] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-836a42eb-0249-46f8-a9eb-44e803047c43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.917745] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1029.917745] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524a4fab-b08a-ce97-e0ff-c7b4208a9ad4" [ 1029.917745] env[68437]: _type = "Task" [ 1029.917745] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.925888] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a4fab-b08a-ce97-e0ff-c7b4208a9ad4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.063819] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ce224f7-a9b4-467a-9a26-9f0c88ad9e54 req-2c073a82-9692-4de6-9807-66332db38297 service nova] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.112813] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1030.113145] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3691abb4-acaa-40c5-9b3a-4e5a7f3093e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.124694] env[68437]: DEBUG oslo_vmware.api [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1030.124694] env[68437]: value = "task-2944629" [ 1030.124694] env[68437]: _type = "Task" [ 1030.124694] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.139440] env[68437]: DEBUG oslo_vmware.api [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.195591] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.195800] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.195958] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.196216] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.196368] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.196485] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.196623] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1030.196782] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.216613] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de896457-c18c-491c-8ff5-4e361bd3e0b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.229692] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48b15cd-d5e6-4bd5-ba40-2faa7caaeeee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.236258] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6485c7a6-406f-45c9-a572-cff2756b0157 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-834e4e03-93e8-487e-bb7d-d4774e7092d7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.799s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.274581] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944628, 'name': CreateVM_Task, 'duration_secs': 0.361922} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.278935] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07ba05b-52f2-402e-9edf-39c0bd50c83f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.281720] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1030.283210] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.283409] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.283955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1030.285156] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-e3cde15b-b8ad-4342-b493-23b8806ce58e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.295224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64695cc-43bc-4bbd-b5b3-744eec7510f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.304018] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1030.304018] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52000335-686f-cfd6-7b1d-787762e9f67a" [ 1030.304018] env[68437]: _type = "Task" [ 1030.304018] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.315433] env[68437]: DEBUG nova.compute.provider_tree [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.323197] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52000335-686f-cfd6-7b1d-787762e9f67a, 'name': SearchDatastore_Task, 'duration_secs': 0.012499} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.323548] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.323839] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.324115] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.430032] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a4fab-b08a-ce97-e0ff-c7b4208a9ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.011178} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.430339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.430605] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ccad008b-0a3a-4234-9c4c-c3a5230a938e/ccad008b-0a3a-4234-9c4c-c3a5230a938e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.430889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.431085] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.431421] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24b48536-02c6-40a1-afe5-cd4b6cce5a20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.433505] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70ed9699-2908-4008-925d-4fa3a872510f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.442796] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1030.442796] env[68437]: value = "task-2944630" [ 1030.442796] env[68437]: _type = "Task" [ 1030.442796] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.447121] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.447235] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.448367] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f590990-de78-47d2-84cb-6f88806404a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.454384] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.458511] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1030.458511] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5228ec9e-3a11-aa60-e91b-54b82283420a" [ 1030.458511] env[68437]: _type = "Task" [ 1030.458511] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.468144] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5228ec9e-3a11-aa60-e91b-54b82283420a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.472457] env[68437]: DEBUG nova.network.neutron [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Updated VIF entry in instance network info cache for port 797183d0-327b-4e58-9355-0ba9d8beecdc. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1030.472803] env[68437]: DEBUG nova.network.neutron [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Updating instance_info_cache with network_info: [{"id": "797183d0-327b-4e58-9355-0ba9d8beecdc", "address": "fa:16:3e:5c:ab:3d", "network": {"id": "892a1fe8-810d-4830-98d0-894fc3262705", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-231395990-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "440e76accc0f4841844dafa8075cfc20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap797183d0-32", "ovs_interfaceid": "797183d0-327b-4e58-9355-0ba9d8beecdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.636562] env[68437]: DEBUG oslo_vmware.api [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944629, 'name': PowerOffVM_Task, 'duration_secs': 0.216665} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.636859] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1030.637074] env[68437]: DEBUG nova.compute.manager [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.637881] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166c6abc-9d31-407a-8cb1-143c496525ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.700451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.819255] env[68437]: DEBUG nova.scheduler.client.report [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.954020] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944630, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475833} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.954308] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ccad008b-0a3a-4234-9c4c-c3a5230a938e/ccad008b-0a3a-4234-9c4c-c3a5230a938e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.954524] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.954775] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5c512fc-3b4a-4711-a99c-451a25e6ed30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.963593] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1030.963593] env[68437]: value = "task-2944631" [ 1030.963593] env[68437]: _type = "Task" [ 1030.963593] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.970277] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5228ec9e-3a11-aa60-e91b-54b82283420a, 'name': SearchDatastore_Task, 'duration_secs': 0.010703} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.971761] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1322e873-a61c-4294-89e4-3334b5ee455e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.977599] env[68437]: DEBUG oslo_concurrency.lockutils [req-ffaae5c7-49c4-47a7-81bc-0523ace06403 req-e3db0706-16b0-4268-a835-37d0448c8846 service nova] Releasing lock "refresh_cache-bd3721bf-74fb-41b3-8090-1b370c0ea9fb" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.977994] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944631, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.981794] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1030.981794] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d20be2-f190-0100-a3cc-53a995dbb20c" [ 1030.981794] env[68437]: _type = "Task" [ 1030.981794] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.991974] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d20be2-f190-0100-a3cc-53a995dbb20c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.159352] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3be4579d-761f-4629-807f-f776ab616773 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.324214] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.440s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.325294] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.329376] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.006s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.330745] env[68437]: INFO nova.compute.claims [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.476956] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944631, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068429} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.477405] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1031.478484] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8424d4-41d6-4368-a1e2-b13747eef04f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.508811] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] ccad008b-0a3a-4234-9c4c-c3a5230a938e/ccad008b-0a3a-4234-9c4c-c3a5230a938e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1031.509630] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c388ce4-339a-4aac-b11a-a522bf9d188e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.532448] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d20be2-f190-0100-a3cc-53a995dbb20c, 'name': SearchDatastore_Task, 'duration_secs': 0.009965} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.534765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.534765] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] bd3721bf-74fb-41b3-8090-1b370c0ea9fb/bd3721bf-74fb-41b3-8090-1b370c0ea9fb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1031.535014] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0afae50-2efe-45f7-b827-0306eb52421b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.539487] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1031.539487] env[68437]: value = "task-2944632" [ 1031.539487] env[68437]: _type = "Task" [ 1031.539487] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.544200] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1031.544200] env[68437]: value = "task-2944633" [ 1031.544200] env[68437]: _type = "Task" [ 1031.544200] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.551504] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944632, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.557938] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944633, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.837449] env[68437]: DEBUG nova.compute.utils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1031.843584] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1031.843801] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1031.895621] env[68437]: DEBUG nova.compute.manager [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.895702] env[68437]: DEBUG nova.compute.manager [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing instance network info cache due to event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1031.895948] env[68437]: DEBUG oslo_concurrency.lockutils [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.896242] env[68437]: DEBUG oslo_concurrency.lockutils [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.896478] env[68437]: DEBUG nova.network.neutron [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1031.943221] env[68437]: DEBUG nova.policy [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1032.056447] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944632, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.062047] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466225} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.062047] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] bd3721bf-74fb-41b3-8090-1b370c0ea9fb/bd3721bf-74fb-41b3-8090-1b370c0ea9fb.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.062483] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.062585] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7041a38d-fe4d-463a-8082-1b39d6e195b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.074925] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1032.074925] env[68437]: value = "task-2944634" [ 1032.074925] env[68437]: _type = "Task" [ 1032.074925] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.086374] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944634, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.298175] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.298459] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.344455] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.374333] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Successfully created port: a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.401858] env[68437]: INFO nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Rebuilding instance [ 1032.473020] env[68437]: DEBUG nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.474017] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257762c1-ef77-4537-be92-6f1b2e5231d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.566285] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944632, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.590697] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.295694} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.592450] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1032.593262] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a328c0-d020-45c8-9141-8be9c7ce4c70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.626527] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] bd3721bf-74fb-41b3-8090-1b370c0ea9fb/bd3721bf-74fb-41b3-8090-1b370c0ea9fb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.626527] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-615e6e7d-27c6-4d82-b2ae-d9692eef6698 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.657044] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1032.657044] env[68437]: value = "task-2944635" [ 1032.657044] env[68437]: _type = "Task" [ 1032.657044] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.669341] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944635, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.804023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.804023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.804023] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d49151-548e-455c-af6c-7d7857ceb22a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.831059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3fbc5d-59fe-4c61-81be-7e828d273920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.860023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfiguring VM to detach interface {{(pid=68437) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1032.864776] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8855f9a-67b8-43d8-b107-eb5d1919b6ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.879349] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161f8170-3345-4acb-b3c8-ac8c2706859d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.882855] env[68437]: DEBUG nova.network.neutron [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updated VIF entry in instance network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1032.883329] env[68437]: DEBUG nova.network.neutron [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.892344] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe535d2d-0c28-4bca-9396-1f213fe8fb13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.896061] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1032.896061] env[68437]: value = "task-2944636" [ 1032.896061] env[68437]: _type = "Task" [ 1032.896061] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.927487] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1053d8a-9453-41c4-9c4f-c53b1caba45f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.934322] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.939342] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554df82f-3728-4273-beeb-f5bce5f3d17d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.955211] env[68437]: DEBUG nova.compute.provider_tree [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.055648] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944632, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.167270] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944635, 'name': ReconfigVM_Task, 'duration_secs': 0.398931} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.167601] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Reconfigured VM instance instance-00000050 to attach disk [datastore2] bd3721bf-74fb-41b3-8090-1b370c0ea9fb/bd3721bf-74fb-41b3-8090-1b370c0ea9fb.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.168279] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfdf0607-9b1f-4db6-b643-3d0188495f0e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.176650] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1033.176650] env[68437]: value = "task-2944637" [ 1033.176650] env[68437]: _type = "Task" [ 1033.176650] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.188689] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944637, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.388733] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.390888] env[68437]: DEBUG oslo_concurrency.lockutils [req-23381d5f-37c9-4083-8a0f-c3da4d35101c req-a1087da5-020c-4413-9a2c-f20bb256a2ed service nova] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.409252] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.419754] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.420028] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.420185] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.420423] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.420838] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.420838] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.420958] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 
tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.425315] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.425463] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.425646] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.425829] env[68437]: DEBUG nova.virt.hardware [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.427463] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38546d33-d617-4042-9021-7019e4f27092 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.440339] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65688e88-9353-46d2-bd86-9e90636b41d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.457789] env[68437]: DEBUG nova.scheduler.client.report [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1033.497219] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1033.497219] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11774138-85f4-46d6-ad22-a00773a560e6 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.507116] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1033.507116] env[68437]: value = "task-2944638" [ 1033.507116] env[68437]: _type = "Task" [ 1033.507116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.519849] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1033.520082] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1033.520804] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfbc093-d3ee-4afc-9c6b-513423d38724 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.530049] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1033.530306] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51be39da-0a9d-4fbe-88c8-7bb0c154e16a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.559179] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944632, 'name': ReconfigVM_Task, 'duration_secs': 1.988373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.559473] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Reconfigured VM instance instance-0000004f to attach disk [datastore2] ccad008b-0a3a-4234-9c4c-c3a5230a938e/ccad008b-0a3a-4234-9c4c-c3a5230a938e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.560120] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77b555b4-23eb-4a40-8650-84d6353984a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.568536] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1033.568536] env[68437]: value = "task-2944640" [ 1033.568536] env[68437]: _type = "Task" [ 1033.568536] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.577642] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944640, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.614080] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1033.614401] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1033.617857] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore2] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1033.617857] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c83e0e83-3b3d-4365-b0e6-3e66562415d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.623616] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1033.623616] env[68437]: value = "task-2944641" [ 1033.623616] env[68437]: _type = "Task" [ 1033.623616] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.637566] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944641, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.687320] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944637, 'name': Rename_Task, 'duration_secs': 0.155483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.690070] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.690070] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e41cfcec-4646-4142-9175-067a6fab09b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.695727] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1033.695727] env[68437]: value = "task-2944642" [ 1033.695727] env[68437]: _type = "Task" [ 1033.695727] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.704661] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.913294] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.952891] env[68437]: DEBUG nova.compute.manager [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1033.953165] env[68437]: DEBUG nova.compute.manager [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing instance network info cache due to event network-changed-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1033.953355] env[68437]: DEBUG oslo_concurrency.lockutils [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] Acquiring lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.953501] env[68437]: DEBUG oslo_concurrency.lockutils [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] Acquired lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.953660] env[68437]: DEBUG nova.network.neutron [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Refreshing network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1033.963657] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.963657] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1033.967145] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.772s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.084934] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944640, 'name': Rename_Task, 'duration_secs': 0.21035} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.085253] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1034.085506] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9eb636b7-87ec-4a5d-9708-8a1ce533f5cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.093557] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1034.093557] env[68437]: value = "task-2944643" [ 1034.093557] env[68437]: _type = "Task" [ 1034.093557] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.104437] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944643, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.136240] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23288} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.136240] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1034.136240] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1034.136240] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1034.185449] env[68437]: DEBUG nova.compute.manager [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Received event network-vif-plugged-a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1034.185688] env[68437]: DEBUG oslo_concurrency.lockutils [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.185896] env[68437]: DEBUG oslo_concurrency.lockutils [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.186452] env[68437]: DEBUG oslo_concurrency.lockutils [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.186813] env[68437]: DEBUG nova.compute.manager [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] No waiting events found dispatching network-vif-plugged-a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.186885] env[68437]: WARNING nova.compute.manager [req-457d6630-71b2-42f2-9829-aa8ccbfe3651 req-28f7ce63-9f2e-48ce-b404-7d0639dedfaa service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Received unexpected event network-vif-plugged-a1c03134-f2fd-4e15-a710-171032761276 for instance with vm_state building and task_state spawning. 
[ 1034.210521] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944642, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.379142] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Successfully updated port: a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.409435] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.474286] env[68437]: DEBUG nova.compute.utils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1034.478861] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1034.478861] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1034.576170] env[68437]: DEBUG nova.policy [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18019a39c85744c78fc36dd6117faab9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b16a7e41c7f648dfb17033f16e092f5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1034.611669] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944643, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.709212] env[68437]: DEBUG oslo_vmware.api [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944642, 'name': PowerOnVM_Task, 'duration_secs': 0.619164} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.710188] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.710188] env[68437]: INFO nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Took 7.49 seconds to spawn the instance on the hypervisor. [ 1034.710379] env[68437]: DEBUG nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.711604] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b318cb84-3271-4895-b876-d3b373b59c22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.716216] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673dfb20-224c-4e0e-bd4d-7bdfb80aac54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.727440] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a71c4d2-507b-4515-a462-a7876d7515d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.761777] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86a8bde-31b0-4e22-abb4-c8cc7d6bec67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.770547] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d85e85d-20c7-4ff9-976a-2b637c6effb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.785629] env[68437]: DEBUG nova.compute.provider_tree [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.883145] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.883470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.884554] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1034.913462] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.981844] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1034.986231] env[68437]: DEBUG nova.network.neutron [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updated VIF entry in instance network info cache for port d22dccb0-3e51-43b6-8bfe-4f6b83be5b62. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1034.986642] env[68437]: DEBUG nova.network.neutron [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [{"id": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "address": "fa:16:3e:b0:0c:e5", "network": {"id": "fe5c8b7d-6242-429a-a6e7-8e292212258b", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-259497282-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b002244273f41d89ddf47570ffe6a02", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd22dccb0-3e", "ovs_interfaceid": "d22dccb0-3e51-43b6-8bfe-4f6b83be5b62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.102748] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944643, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.182091] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1035.182439] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.183134] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1035.183368] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.183542] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1035.183727] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1035.184637] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1035.184637] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1035.184637] 
env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1035.184637] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1035.184929] env[68437]: DEBUG nova.virt.hardware [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1035.186064] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1076ff-d0fd-491e-a465-ed1ffd663b18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.196021] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Successfully created port: 2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1035.198176] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e510387c-08c8-4799-ada9-cdda2b8e9cbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.212649] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:96:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6e03f32-de4c-4405-9930-c4a70c9d560d', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.220545] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.220799] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.221435] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-307d5dbe-3f6d-403a-90a8-7dd03dd02c0c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.242818] env[68437]: INFO nova.compute.manager [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Took 18.88 seconds to build instance. [ 1035.245251] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.245251] env[68437]: value = "task-2944644" [ 1035.245251] env[68437]: _type = "Task" [ 1035.245251] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.259994] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944644, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.288522] env[68437]: DEBUG nova.scheduler.client.report [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.410176] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.452750] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1035.493855] env[68437]: DEBUG oslo_concurrency.lockutils [req-baa66321-1dd5-470f-935a-eb1886f66032 req-d02264c7-1fc4-4944-98b1-e0bee8c0ffa3 service nova] Releasing lock "refresh_cache-a01364f9-e30d-4140-ae41-1e7c4aaa2251" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.606882] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944643, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.742518] env[68437]: DEBUG nova.network.neutron [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Updating instance_info_cache with network_info: [{"id": "a1c03134-f2fd-4e15-a710-171032761276", "address": "fa:16:3e:81:dc:a1", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c03134-f2", "ovs_interfaceid": "a1c03134-f2fd-4e15-a710-171032761276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.745652] env[68437]: DEBUG oslo_concurrency.lockutils [None req-479f4ffb-b233-4345-ad2d-f2195883ff8e tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.392s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.763944] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944644, 'name': CreateVM_Task, 'duration_secs': 0.307617} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.763944] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.764144] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.764291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.764614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1035.765186] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b39cf3bf-5fbe-4735-8134-e22ef72fec16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.769781] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1035.769781] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525bb4b5-eb7b-7ee3-128c-292c718164b7" [ 1035.769781] env[68437]: _type = "Task" [ 1035.769781] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.778474] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525bb4b5-eb7b-7ee3-128c-292c718164b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.910884] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.995880] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.037453] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.037708] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.037857] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.038196] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.038397] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.038553] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.042020] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.042020] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.042020] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.042020] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.042020] env[68437]: DEBUG nova.virt.hardware [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.042020] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d895dd7-1abf-4dcb-a6b8-28d821d44930 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.052000] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a4f869-51d1-4bde-8390-ede7971a588d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.105248] env[68437]: DEBUG oslo_vmware.api [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944643, 'name': PowerOnVM_Task, 'duration_secs': 1.569438} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.105542] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.105748] env[68437]: INFO nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Took 11.31 seconds to spawn the instance on the hypervisor. 
[ 1036.105927] env[68437]: DEBUG nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1036.107069] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce5955f-2285-4f42-88ef-e97170369d3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.244106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.244455] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Instance network_info: |[{"id": "a1c03134-f2fd-4e15-a710-171032761276", "address": "fa:16:3e:81:dc:a1", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c03134-f2", "ovs_interfaceid": "a1c03134-f2fd-4e15-a710-171032761276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.244876] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:dc:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1c03134-f2fd-4e15-a710-171032761276', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.252791] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.253072] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.254028] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7f85b3f-4272-405a-a9b3-d645229fa8d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.279402] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.279402] env[68437]: value = "task-2944645" [ 1036.279402] env[68437]: _type = "Task" [ 1036.279402] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.284298] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525bb4b5-eb7b-7ee3-128c-292c718164b7, 'name': SearchDatastore_Task, 'duration_secs': 0.013052} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.287594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.287850] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.288218] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.289033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.289033] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.289033] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-959d7360-ffa7-422a-a00f-a4845acf965c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.295683] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944645, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.300049] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.300049] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.300049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.332s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.303351] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dcb4fad-49eb-473b-ae42-0afcb00d4aaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.306167] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.488s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.306543] env[68437]: DEBUG nova.objects.instance [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lazy-loading 'resources' on Instance uuid e51356e4-7647-4678-bb4f-f069b5c7fef6 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.312221] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1036.312221] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52577780-f991-3163-8af7-398d14f8e8db" [ 1036.312221] env[68437]: _type = "Task" [ 1036.312221] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.321810] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52577780-f991-3163-8af7-398d14f8e8db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.107618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.107618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.107940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.107940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.108129] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.110112] env[68437]: DEBUG nova.compute.manager [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Received event network-changed-a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1037.110347] env[68437]: DEBUG nova.compute.manager [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Refreshing instance network info cache due to event network-changed-a1c03134-f2fd-4e15-a710-171032761276. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1037.110550] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] Acquiring lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.110724] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] Acquired lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.110837] env[68437]: DEBUG nova.network.neutron [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Refreshing network info cache for port a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1037.115222] env[68437]: INFO nova.compute.manager [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Terminating instance [ 1037.135575] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944645, 'name': CreateVM_Task, 'duration_secs': 0.358344} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.142439] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.143299] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52577780-f991-3163-8af7-398d14f8e8db, 'name': SearchDatastore_Task, 'duration_secs': 0.010837} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.143511] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.144171] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.144329] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.144630] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.148246] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f08c2ab-29ef-4b76-a9a4-21a763d69321 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.149911] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d324ca39-3bbc-475d-9856-a9c59c7fde56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.158228] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1037.158228] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b3f62d-e6b5-4815-fe81-a2c94be938a6" [ 1037.158228] env[68437]: _type = "Task" [ 1037.158228] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.159990] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1037.159990] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5290cd95-75af-bcfc-5af8-ac1afd4613de" [ 1037.159990] env[68437]: _type = "Task" [ 1037.159990] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.160438] env[68437]: INFO nova.compute.manager [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Took 23.10 seconds to build instance. 
[ 1037.177478] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b3f62d-e6b5-4815-fe81-a2c94be938a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.177478] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290cd95-75af-bcfc-5af8-ac1afd4613de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.180893] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Successfully updated port: 2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1037.214696] env[68437]: INFO nova.scheduler.client.report [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocation for migration cc5d69af-8701-41e6-9eac-99856916861a [ 1037.393475] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a138e396-77c6-44a3-9493-789f57f40491 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.401054] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9f6d96-18cd-4203-a0d1-18c50cd71359 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.437355] env[68437]: DEBUG nova.network.neutron [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Updated VIF entry in instance network info cache for port a1c03134-f2fd-4e15-a710-171032761276. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1037.437770] env[68437]: DEBUG nova.network.neutron [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Updating instance_info_cache with network_info: [{"id": "a1c03134-f2fd-4e15-a710-171032761276", "address": "fa:16:3e:81:dc:a1", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1c03134-f2", "ovs_interfaceid": "a1c03134-f2fd-4e15-a710-171032761276", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.439380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1071d514-8c50-4ba7-be26-3cb30938c36a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.447143] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e4bcee-1438-4ce3-ae28-e7b818903c17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.465314] env[68437]: DEBUG nova.compute.provider_tree [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.609575] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.631591] env[68437]: DEBUG nova.compute.manager [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1037.631881] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.634046] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399466d0-8ae3-405f-af8e-c783672306ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.642013] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.642258] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2de5504b-acad-4749-ba3a-9746cb768dbe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.649099] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1037.649099] env[68437]: value = "task-2944646" [ 1037.649099] env[68437]: _type = "Task" [ 1037.649099] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.656869] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.671133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0d4121ed-492f-4aa7-b726-48a480dedfbd tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.657s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.672380] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b3f62d-e6b5-4815-fe81-a2c94be938a6, 'name': SearchDatastore_Task, 'duration_secs': 0.016148} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.672380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.672380] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.672380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.677278] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290cd95-75af-bcfc-5af8-ac1afd4613de, 'name': SearchDatastore_Task, 'duration_secs': 0.020954} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.677521] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.677771] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.678051] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.678240] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.678451] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47fbb552-1ae2-4e6a-9809-2016f670aec4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.680305] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c892833-b02e-40b1-9cca-0dd0cf91488f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.683014] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.683160] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquired lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.683320] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1037.686479] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1037.686479] env[68437]: value = "task-2944647" [ 1037.686479] env[68437]: _type = "Task" [ 1037.686479] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.690476] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.690641] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.691654] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de1b018f-be28-40e3-aac5-abdbc8f98507 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.697998] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944647, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.701749] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1037.701749] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529dd195-9bf0-0683-6022-1cee878e0265" [ 1037.701749] env[68437]: _type = "Task" [ 1037.701749] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.710171] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529dd195-9bf0-0683-6022-1cee878e0265, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.719832] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23c278d1-402a-43e8-9427-07cfa1c0a73a tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 16.148s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.877791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.878303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.878689] env[68437]: INFO nova.compute.manager [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Rebooting instance [ 1037.943276] env[68437]: DEBUG oslo_concurrency.lockutils [req-2d6d03b3-8eab-4b3b-9eb9-0f8b424205d2 req-54508be8-cf63-4ed1-b67d-20464a1c24fe service nova] Releasing lock "refresh_cache-75a9fb57-5796-4853-b429-6e8ea7aba1de" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.972121] env[68437]: DEBUG nova.scheduler.client.report [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.115272] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.165742] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944646, 'name': PowerOffVM_Task, 'duration_secs': 0.192486} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.166063] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.166970] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1038.166970] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9d8f7f8-5ad9-49fb-a32e-b31e32ba9132 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.200774] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944647, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507042} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.201069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.201833] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.201833] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58e14ccf-99c2-4797-99c3-05b75db80190 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.213589] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529dd195-9bf0-0683-6022-1cee878e0265, 'name': SearchDatastore_Task, 'duration_secs': 0.010048} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.216052] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1038.216052] env[68437]: value = "task-2944649" [ 1038.216052] env[68437]: _type = "Task" [ 1038.216052] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.216334] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0250f4a-0bd0-4d84-bf3f-3cccb80e1ed6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.227568] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1038.227568] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b5dc69-eae1-7ff0-c427-940e5716ce18" [ 1038.227568] env[68437]: _type = "Task" [ 1038.227568] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.231956] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1038.234337] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944649, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.245195] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b5dc69-eae1-7ff0-c427-940e5716ce18, 'name': SearchDatastore_Task, 'duration_secs': 0.011235} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.245634] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.246069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 75a9fb57-5796-4853-b429-6e8ea7aba1de/75a9fb57-5796-4853-b429-6e8ea7aba1de.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.246865] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-856d1252-ae83-4459-b506-d61d57b31467 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.249613] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1038.249876] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1038.250104] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Deleting the datastore file [datastore2] bd3721bf-74fb-41b3-8090-1b370c0ea9fb {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.250807] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77de30a9-9335-4365-a80a-badea6c28b42 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.255416] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1038.255416] env[68437]: value = "task-2944650" [ 1038.255416] env[68437]: _type = "Task" [ 1038.255416] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.259534] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for the task: (returnval){ [ 1038.259534] env[68437]: value = "task-2944651" [ 1038.259534] env[68437]: _type = "Task" [ 1038.259534] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.269128] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.274650] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.390899] env[68437]: DEBUG nova.network.neutron [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Updating instance_info_cache with network_info: [{"id": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "address": "fa:16:3e:16:45:f3", "network": {"id": "3584c047-b483-4c91-b78c-030eef86fbe5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1619752157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16a7e41c7f648dfb17033f16e092f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c449a27-60", "ovs_interfaceid": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.405744] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] 
Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.405943] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.406167] env[68437]: DEBUG nova.network.neutron [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1038.481775] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.485547] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 7.785s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.489460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.489460] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1038.489460] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987df70f-aba5-40a3-8c49-4dda8ebe17e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.496955] env[68437]: DEBUG nova.compute.manager [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Received event network-vif-plugged-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.498032] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] Acquiring lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.498366] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b 
service nova] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.498704] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.499041] env[68437]: DEBUG nova.compute.manager [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] No waiting events found dispatching network-vif-plugged-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1038.499371] env[68437]: WARNING nova.compute.manager [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Received unexpected event network-vif-plugged-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 for instance with vm_state building and task_state spawning. [ 1038.499706] env[68437]: DEBUG nova.compute.manager [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Received event network-changed-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.500041] env[68437]: DEBUG nova.compute.manager [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Refreshing instance network info cache due to event network-changed-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1038.500368] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] Acquiring lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.509711] env[68437]: DEBUG nova.compute.manager [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Received event network-changed-d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1038.509812] env[68437]: DEBUG nova.compute.manager [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Refreshing instance network info cache due to event network-changed-d21ad3db-ccd9-4d63-9eb0-4620abdab063. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1038.510408] env[68437]: DEBUG oslo_concurrency.lockutils [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] Acquiring lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.510408] env[68437]: DEBUG oslo_concurrency.lockutils [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] Acquired lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.510624] env[68437]: DEBUG nova.network.neutron [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Refreshing network info cache for port d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1038.514997] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a609d4b8-27bb-4637-b8be-47aa443c324b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.526885] env[68437]: INFO nova.scheduler.client.report [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Deleted allocations for instance e51356e4-7647-4678-bb4f-f069b5c7fef6 [ 1038.555924] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee42bdc-28bc-4416-b88b-56a447d182d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.565256] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a294b3f-ae98-4b6d-8466-89318163431d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.595881] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179172MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1038.596034] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.596250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.609109] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.707537] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.707837] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.733202] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944649, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070211} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.733503] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.734409] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6f7258-38ec-49d2-a12d-6a6f01ec6cd2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.757020] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.757385] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87ccc07a-8977-44d9-b34b-64e77235ce9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.782435] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944650, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.786598] env[68437]: DEBUG oslo_vmware.api [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Task: {'id': task-2944651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158286} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.787052] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1038.787052] env[68437]: value = "task-2944652" [ 1038.787052] env[68437]: _type = "Task" [ 1038.787052] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.787391] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.787695] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.788014] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.788326] env[68437]: INFO nova.compute.manager [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1038.788702] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.789069] env[68437]: DEBUG nova.compute.manager [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1038.789239] env[68437]: DEBUG nova.network.neutron [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1038.799797] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944652, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.893184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.893572] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.893820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "697d5011-fb4e-4542-851b-39953bbb293d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.894049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.894349] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.896754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Releasing lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.897145] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Instance network_info: |[{"id": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "address": "fa:16:3e:16:45:f3", "network": {"id": "3584c047-b483-4c91-b78c-030eef86fbe5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1619752157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16a7e41c7f648dfb17033f16e092f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c449a27-60", "ovs_interfaceid": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1038.897749] env[68437]: INFO nova.compute.manager [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Terminating instance [ 1038.899646] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] Acquired lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.899801] env[68437]: DEBUG nova.network.neutron [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Refreshing network info cache for port 2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1038.901765] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:45:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea6e81c3-94aa-40a6-a4d4-7f338b503442', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1038.911301] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Creating folder: Project (b16a7e41c7f648dfb17033f16e092f5d). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1038.918045] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48cd5f8c-f956-436c-a64d-8a821e469384 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.931026] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Created folder: Project (b16a7e41c7f648dfb17033f16e092f5d) in parent group-v590848. 
[ 1038.931026] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Creating folder: Instances. Parent ref: group-v591072. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1038.931026] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06aee33e-6fb3-4d24-865d-cc637344c6d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.940282] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Created folder: Instances in parent group-v591072. [ 1038.940524] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1038.940716] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1038.941087] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3587bc15-823e-4fa8-962c-0c4e2a7fa5e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.969396] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1038.969396] env[68437]: value = "task-2944655" [ 1038.969396] env[68437]: _type = "Task" [ 1038.969396] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.982075] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944655, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.037286] env[68437]: DEBUG oslo_concurrency.lockutils [None req-865adfeb-8c59-40f4-96c6-a1ca96f5d2e3 tempest-ServerDiagnosticsV248Test-285430919 tempest-ServerDiagnosticsV248Test-285430919-project-member] Lock "e51356e4-7647-4678-bb4f-f069b5c7fef6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.460s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.117472] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.211058] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1039.269407] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.300660] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944652, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.314992] env[68437]: DEBUG nova.network.neutron [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updated VIF entry in instance network info cache for port d21ad3db-ccd9-4d63-9eb0-4620abdab063. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1039.315863] env[68437]: DEBUG nova.network.neutron [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updating instance_info_cache with network_info: [{"id": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "address": "fa:16:3e:2e:04:04", "network": {"id": "12d67069-fd07-4979-9476-3dc1ea37cd07", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1182471126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68970ec925754b3faeba3b431241ce29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd21ad3db-cc", "ovs_interfaceid": "d21ad3db-ccd9-4d63-9eb0-4620abdab063", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.364722] env[68437]: DEBUG nova.network.neutron [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Updated VIF entry in instance network info cache for port 2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1039.365206] env[68437]: DEBUG nova.network.neutron [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Updating instance_info_cache with network_info: [{"id": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "address": "fa:16:3e:16:45:f3", "network": {"id": "3584c047-b483-4c91-b78c-030eef86fbe5", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1619752157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b16a7e41c7f648dfb17033f16e092f5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea6e81c3-94aa-40a6-a4d4-7f338b503442", "external-id": "nsx-vlan-transportzone-637", "segmentation_id": 637, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c449a27-60", "ovs_interfaceid": "2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.373931] env[68437]: DEBUG nova.network.neutron [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.417092] env[68437]: DEBUG nova.compute.manager [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Start destroying the instance on the 
hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.417330] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.418949] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba36444-44e5-440b-838c-084293a6c7b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.428484] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.428777] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88182ec1-9f8a-49e5-838f-0d2ce77ce975 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.436373] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1039.436373] env[68437]: value = "task-2944656" [ 1039.436373] env[68437]: _type = "Task" [ 1039.436373] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.446524] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.452599] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "832697dc-53ec-406d-b698-d10766bd8f9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.452864] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.478860] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944655, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.622703] env[68437]: DEBUG oslo_vmware.api [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944636, 'name': ReconfigVM_Task, 'duration_secs': 6.479502} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.622983] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.623291] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Reconfigured VM to detach interface {{(pid=68437) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1039.633226] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 19dde8dd-eae6-41a0-b147-c505db1cda15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cf691a81-60e3-40ed-ba80-8f481ff2554b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance a01364f9-e30d-4140-ae41-1e7c4aaa2251 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 3f82b137-81d5-4754-b222-3cefce0b2a10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.634428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 697d5011-fb4e-4542-851b-39953bbb293d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.635140] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.635140] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ccad008b-0a3a-4234-9c4c-c3a5230a938e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.635140] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance bd3721bf-74fb-41b3-8090-1b370c0ea9fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.635140] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 75a9fb57-5796-4853-b429-6e8ea7aba1de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.635140] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1039.735256] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.767832] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944650, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.432146} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.768134] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 75a9fb57-5796-4853-b429-6e8ea7aba1de/75a9fb57-5796-4853-b429-6e8ea7aba1de.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.768373] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.768633] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63b75af7-ee6e-47cc-a3e9-297f9c18304e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.775477] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1039.775477] env[68437]: value = "task-2944657" [ 1039.775477] env[68437]: _type = "Task" [ 1039.775477] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.782672] env[68437]: DEBUG nova.network.neutron [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.783918] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944657, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.802607] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944652, 'name': ReconfigVM_Task, 'duration_secs': 0.704169} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.802913] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Reconfigured VM instance instance-0000004e to attach disk [datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28/f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.803578] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15f7a5b8-e3aa-456f-aa82-4ddccabdb227 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.809258] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1039.809258] env[68437]: value = "task-2944658" [ 1039.809258] env[68437]: _type = "Task" [ 1039.809258] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.822332] env[68437]: DEBUG oslo_concurrency.lockutils [req-4ede3130-4743-4e7c-be88-4a2d59a32f2d req-c5b383a1-0e5a-4368-bd52-afea6533ce5c service nova] Releasing lock "refresh_cache-ccad008b-0a3a-4234-9c4c-c3a5230a938e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.823094] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944658, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.869405] env[68437]: DEBUG oslo_concurrency.lockutils [req-bc435e2a-d83c-4f9d-85df-40f19c981ef8 req-302006b6-767e-4008-a88c-469c124c7f1b service nova] Releasing lock "refresh_cache-33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.876750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.946350] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944656, 'name': PowerOffVM_Task, 'duration_secs': 0.308391} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.946499] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.946674] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.946924] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5283ad0d-8972-40d4-b2c8-aadb47452ece {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.955581] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1039.980821] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944655, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.011963] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.012209] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.012392] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore1] 697d5011-fb4e-4542-851b-39953bbb293d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.012646] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0a9099b-abf6-4ecc-907e-082e84aae911 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.018859] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1040.018859] env[68437]: value = "task-2944660" [ 1040.018859] env[68437]: _type = "Task" [ 1040.018859] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.026951] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.138480] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.285394] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068535} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.285586] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.286360] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7392c2-9ec1-4725-a107-6b63dc08dae5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.289365] env[68437]: INFO nova.compute.manager [-] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Took 1.50 seconds to deallocate network for instance. [ 1040.314629] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 75a9fb57-5796-4853-b429-6e8ea7aba1de/75a9fb57-5796-4853-b429-6e8ea7aba1de.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.315386] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7774dc73-7959-472c-8b9c-b16f5af47665 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.341063] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944658, 'name': Rename_Task, 'duration_secs': 0.292678} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.345017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.345017] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1040.345017] env[68437]: value = "task-2944661" [ 1040.345017] env[68437]: _type = "Task" [ 1040.345017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.345017] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7dae25fb-08c5-4f33-a21d-7b4ace93e0cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.355266] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944661, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.356881] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1040.356881] env[68437]: value = "task-2944662" [ 1040.356881] env[68437]: _type = "Task" [ 1040.356881] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.368447] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.382372] env[68437]: DEBUG nova.compute.manager [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1040.383369] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f52a85-e138-47da-bea2-fcde50814aa0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.481695] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944655, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.482337] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.526692] env[68437]: DEBUG nova.compute.manager [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Received event network-vif-deleted-797183d0-327b-4e58-9355-0ba9d8beecdc {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1040.527072] env[68437]: DEBUG nova.compute.manager [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-deleted-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1040.527560] env[68437]: INFO nova.compute.manager [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Neutron deleted interface 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6; detaching it from the instance and deleting it from the info cache [ 1040.527994] env[68437]: DEBUG nova.network.neutron [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "address": "fa:16:3e:bb:a2:71", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap834e4e03-93", "ovs_interfaceid": "834e4e03-93e8-487e-bb7d-d4774e7092d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.539023] env[68437]: DEBUG oslo_vmware.api [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16977} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.539023] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.539023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.539023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.539023] env[68437]: INFO nova.compute.manager [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1040.539023] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.539023] env[68437]: DEBUG nova.compute.manager [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.539023] env[68437]: DEBUG nova.network.neutron [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1040.642492] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 832697dc-53ec-406d-b698-d10766bd8f9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.642492] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1040.642492] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3072MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1040.816593] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.856152] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944661, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.868784] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944662, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.886744] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.886922] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.887126] env[68437]: DEBUG nova.network.neutron [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1040.889727] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ace76ae-76db-4435-a979-32c820eab5d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.900919] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06465d36-625b-4124-a957-722d27dddce8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.931982] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b00c91-7e72-43f5-a2a0-af6be035fc4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.939820] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25e6418-9a00-49fd-b56e-df8484648825 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.955098] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.979922] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944655, 'name': CreateVM_Task, 'duration_secs': 1.829312} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.980069] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1040.980740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.980940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.981317] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1040.981553] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82ea8de6-5da8-4b61-9173-b93393bcd486 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.986426] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1040.986426] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a097a7-22de-a933-26ee-b503def56340" [ 1040.986426] env[68437]: _type = "Task" [ 1040.986426] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.994186] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a097a7-22de-a933-26ee-b503def56340, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.031273] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.031436] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] Acquired lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.032293] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f373fc2-cc46-4761-a13a-825dca2fa78b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.050187] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] Releasing lock "3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.050454] env[68437]: WARNING nova.compute.manager [req-e9365ad2-e78a-492f-92ad-8a1702810cec req-f55195a0-5c22-468d-97cb-b602c4fbf2d3 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Detach interface failed, port_id=6ccc745c-15f0-4593-b5f9-a8bab6edf0d6, reason: No device with interface-id 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 exists on VM: nova.exception.NotFound: No device with interface-id 6ccc745c-15f0-4593-b5f9-a8bab6edf0d6 exists on VM [ 1041.110986] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.111064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.111309] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.111495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.111662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.114193] env[68437]: INFO nova.compute.manager [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Terminating instance [ 1041.314438] env[68437]: DEBUG nova.network.neutron [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.354421] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944661, 'name': ReconfigVM_Task, 'duration_secs': 0.82319} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.357308] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 75a9fb57-5796-4853-b429-6e8ea7aba1de/75a9fb57-5796-4853-b429-6e8ea7aba1de.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.357308] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b84ef58-414a-4dcc-9c47-66366e997ef9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.362647] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1041.362647] env[68437]: value = "task-2944663" [ 1041.362647] env[68437]: _type = "Task" [ 1041.362647] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.368943] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944662, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.375152] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944663, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.404517] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2aa24e-dda4-4dce-bfbd-3557911fb80f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.411883] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Doing hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1041.412309] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-95b280d5-4ac3-4070-9f35-f223422bce1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.418941] env[68437]: DEBUG oslo_vmware.api [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1041.418941] env[68437]: value = "task-2944664" [ 1041.418941] env[68437]: _type = "Task" [ 1041.418941] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.431150] env[68437]: DEBUG oslo_vmware.api [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944664, 'name': ResetVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.458865] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.499010] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a097a7-22de-a933-26ee-b503def56340, 'name': SearchDatastore_Task, 'duration_secs': 0.010018} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.499412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.499683] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.499895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.500053] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.500360] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.500644] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97510031-68f6-4c35-9f77-c862929dbb5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.513908] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.514755] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.519066] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eabf14d-7df4-43df-bfb4-a845a4e391d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.528047] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1041.528047] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5263e6d3-2ee0-6b7d-ee99-aa0310d3a93a" [ 1041.528047] env[68437]: _type = "Task" [ 1041.528047] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.536226] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5263e6d3-2ee0-6b7d-ee99-aa0310d3a93a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.619917] env[68437]: DEBUG nova.compute.manager [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1041.619917] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1041.619917] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538effa7-e496-47a1-8d20-82ee1c9e286f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.628850] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1041.629288] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a20a2f5-229e-4512-9aa8-83c9adcff046 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.636268] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1041.636268] env[68437]: value = "task-2944665" [ 1041.636268] env[68437]: _type = "Task" [ 1041.636268] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.645912] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944665, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.819331] env[68437]: INFO nova.compute.manager [-] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Took 1.28 seconds to deallocate network for instance. [ 1041.872373] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944662, 'name': PowerOnVM_Task, 'duration_secs': 1.181445} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.873216] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.876021] env[68437]: DEBUG nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.876021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ed86c-b223-40f4-8782-63a48c7dcb39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.879980] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944663, 'name': Rename_Task, 'duration_secs': 0.212422} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.880892] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.881248] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33c20a08-7e1b-4689-b8bb-76df3f968c04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.891484] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1041.891484] env[68437]: value = "task-2944666" [ 1041.891484] env[68437]: _type = "Task" [ 1041.891484] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.903359] env[68437]: INFO nova.network.neutron [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Port 834e4e03-93e8-487e-bb7d-d4774e7092d7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1041.903996] env[68437]: DEBUG nova.network.neutron [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [{"id": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "address": "fa:16:3e:ed:52:c2", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd525d9d-aa", "ovs_interfaceid": "bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.910522] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944666, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.932715] env[68437]: DEBUG oslo_vmware.api [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944664, 'name': ResetVM_Task, 'duration_secs': 0.114126} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.933204] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Did hard reboot of VM {{(pid=68437) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1041.933742] env[68437]: DEBUG nova.compute.manager [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.934728] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a12cd2-d900-4377-b6ba-6aa7124724b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.963527] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1041.964639] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.368s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.967122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.230s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.967122] env[68437]: INFO nova.compute.claims [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.040174] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5263e6d3-2ee0-6b7d-ee99-aa0310d3a93a, 'name': SearchDatastore_Task, 'duration_secs': 0.012219} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.040174] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10579a29-1f24-4227-9185-66ae6a6bcdbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.043128] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1042.043128] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521671bf-515e-0f1d-0247-0b07c50c1fcf" [ 1042.043128] env[68437]: _type = "Task" [ 1042.043128] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.050534] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521671bf-515e-0f1d-0247-0b07c50c1fcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.146763] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944665, 'name': PowerOffVM_Task, 'duration_secs': 0.353411} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.149150] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.149150] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1042.149150] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0e9b49f-e4fe-4d03-8e68-a7572590c4de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.272030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1042.272030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1042.272030] env[68437]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleting the datastore file [datastore1] 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.272030] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6c6bbaa-a3cd-443f-9e3b-f5bf8c6a64a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.280121] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1042.280121] env[68437]: value = "task-2944668" [ 1042.280121] env[68437]: _type = "Task" [ 1042.280121] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.287948] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.324685] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.389013] env[68437]: INFO nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] bringing vm to original state: 'stopped' [ 1042.401075] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944666, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.412041] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.447757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a62fbbb7-692a-43dd-9a9b-d1d88b987362 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 4.570s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.556804] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521671bf-515e-0f1d-0247-0b07c50c1fcf, 'name': SearchDatastore_Task, 'duration_secs': 0.017548} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.557107] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.557387] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3/33cc7565-9cd0-47a7-afe2-ac3849ba7ac3.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.557641] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0adb9280-8032-41d2-9bff-61a983402f30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.563726] env[68437]: DEBUG nova.compute.manager [req-ec80ebed-5135-4d28-81a6-c48ad3843cb9 req-3e6c29b6-cec7-452e-b517-66f40b0b5915 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-deleted-834e4e03-93e8-487e-bb7d-d4774e7092d7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1042.563921] env[68437]: DEBUG nova.compute.manager [req-ec80ebed-5135-4d28-81a6-c48ad3843cb9 req-3e6c29b6-cec7-452e-b517-66f40b0b5915 service nova] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Received event network-vif-deleted-b9c19590-2f8d-4149-989f-8d0fd1e5fe29 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1042.565969] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 
tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1042.565969] env[68437]: value = "task-2944669" [ 1042.565969] env[68437]: _type = "Task" [ 1042.565969] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.576820] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.788427] env[68437]: DEBUG oslo_vmware.api [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274295} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.788695] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1042.788880] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1042.789073] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1042.789249] env[68437]: INFO nova.compute.manager [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1042.789488] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1042.789685] env[68437]: DEBUG nova.compute.manager [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1042.789779] env[68437]: DEBUG nova.network.neutron [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1042.853166] env[68437]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 834e4e03-93e8-487e-bb7d-d4774e7092d7 could not be found.", "detail": ""}} {{(pid=68437) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1042.853407] env[68437]: DEBUG nova.network.neutron [-] Unable to show port 834e4e03-93e8-487e-bb7d-d4774e7092d7 as it no longer exists. {{(pid=68437) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1042.909334] env[68437]: DEBUG oslo_vmware.api [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944666, 'name': PowerOnVM_Task, 'duration_secs': 0.722095} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.909722] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.910036] env[68437]: INFO nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Took 9.52 seconds to spawn the instance on the hypervisor. 
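The records above show the oslo.vmware pattern that recurs throughout this log: an asynchronous vSphere "*_Task" method (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) is started through the authenticated VMwareAPISession, and wait_for_task() then polls it, producing the "progress is N%" and "completed successfully" records via _poll_task. Below is a minimal sketch of that pattern; the vCenter host, credentials, the exact constructor keyword names and the get_vm_ref_somehow() helper are placeholders/assumptions, not values from this deployment.

    # Sketch of the invoke-then-wait task pattern seen in the log.
    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials; keyword names follow oslo.vmware's
    # VMwareAPISession constructor (assumed).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)   # interval behind the "progress is N%" polls

    vm_ref = get_vm_ref_somehow()   # hypothetical helper; normally a PropertyCollector lookup

    # Start the asynchronous task on the vCenter side (PowerOnVM_Task here,
    # as for task-2944666 above), then block until it reaches a terminal state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)   # raises if vCenter reports an error state

Because the task runs server-side, the compute service only holds a task reference and polls it, which is why long operations such as the image-cache disk copy interleave with unrelated records in this log.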
[ 1042.910313] env[68437]: DEBUG nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.911905] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02017524-fae1-4265-86db-682e85fb1c9b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.917263] env[68437]: DEBUG oslo_concurrency.lockutils [None req-be57964e-e541-4e0b-8ceb-78386076a585 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "interface-3f82b137-81d5-4754-b222-3cefce0b2a10-6ccc745c-15f0-4593-b5f9-a8bab6edf0d6" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.619s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.075735] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944669, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.211304] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6a03a5-5efa-474e-bbcc-017c49b55046 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.221206] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2f29f4-fce9-4dc7-baea-a6beaeaac7a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.268087] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a80dcca-e0e3-4b05-8c0d-24368cd4f8c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.277581] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb8880f-13d0-4044-ae01-a58770937b2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.293181] env[68437]: DEBUG nova.compute.provider_tree [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.404646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.404646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.404646] env[68437]: DEBUG nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.405541] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d637a4e3-10a5-4c56-b591-c376509142b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.420447] env[68437]: DEBUG nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1043.438076] env[68437]: INFO nova.compute.manager [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Took 19.80 seconds to build instance. [ 1043.580030] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.908392} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.580030] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3/33cc7565-9cd0-47a7-afe2-ac3849ba7ac3.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.580270] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.580418] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-61f941c2-d425-4cca-a5c6-05b6d7b98286 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.587367] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1043.587367] env[68437]: value = "task-2944670" [ 1043.587367] env[68437]: _type = "Task" [ 1043.587367] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.596662] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944670, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.797343] env[68437]: DEBUG nova.scheduler.client.report [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.925781] env[68437]: DEBUG nova.network.neutron [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.927744] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.927744] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a17d5dda-ab17-4165-8cd0-23234c3f82b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.936467] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1043.936467] env[68437]: value = "task-2944671" [ 1043.936467] env[68437]: _type = "Task" [ 1043.936467] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.940754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e916332d-99cf-4354-bd35-4f9d39c401e9 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.310s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.948320] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.098507] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079101} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.098802] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.100147] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ced885-449d-48f8-abfd-e9a9d69266f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.124895] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3/33cc7565-9cd0-47a7-afe2-ac3849ba7ac3.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.125238] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-567ca6b2-76bc-474d-9dc1-af47e5f47847 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.145282] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1044.145282] env[68437]: value = "task-2944672" [ 1044.145282] env[68437]: _type = "Task" [ 1044.145282] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.155471] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944672, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.305047] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.305627] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1044.308476] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.826s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.309861] env[68437]: INFO nova.compute.claims [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1044.431585] env[68437]: INFO nova.compute.manager [-] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Took 1.64 seconds to deallocate network for instance. [ 1044.448753] env[68437]: DEBUG oslo_vmware.api [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944671, 'name': PowerOffVM_Task, 'duration_secs': 0.406897} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.449049] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.449532] env[68437]: DEBUG nova.compute.manager [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.450944] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3b75f1-a910-4645-859f-ae243118e954 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.537262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.537530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.805872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 
tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.805872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.811048] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.813746] env[68437]: DEBUG nova.compute.utils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1044.815571] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.815740] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1044.838737] env[68437]: DEBUG nova.compute.manager [req-4a6de077-8eb9-4919-929e-8119cc995992 req-b32d4e0e-e993-4b19-bf34-47f3d8405a84 service nova] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Received event network-vif-deleted-bd525d9d-aa3e-49a9-bae0-6b4431b4bf2c {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1044.858426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.858659] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.876266] env[68437]: DEBUG nova.policy [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6801cab23bf4aadb8d7f326f0643c32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73e8290afeb84bf3976cfa22d3452ca7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1044.941170] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.963507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.559s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.040266] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 
tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.157866] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944672, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.183202] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Successfully created port: e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.298618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.298867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.312426] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.323887] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1045.362476] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.471174] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.562307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.611865] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b26467-b08d-4745-8329-caf4469b5938 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.619543] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7422fcbe-1db0-4ceb-b06e-bedaef1846b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.663498] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2cd00e-5afe-4fd2-82ff-d9daa54e46b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.670784] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944672, 'name': ReconfigVM_Task, 'duration_secs': 1.044526} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.672790] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3/33cc7565-9cd0-47a7-afe2-ac3849ba7ac3.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.673496] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c5da6fc-b09d-46d2-9c58-13ecb9c2643a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.675997] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158da6de-bce3-45de-8e79-d53b85669d95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.690640] env[68437]: DEBUG nova.compute.provider_tree [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.692896] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1045.692896] env[68437]: value = "task-2944673" [ 1045.692896] env[68437]: _type = "Task" [ 1045.692896] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.700273] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944673, 'name': Rename_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.802702] env[68437]: DEBUG nova.compute.utils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.837028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.881822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.194931] env[68437]: DEBUG nova.scheduler.client.report [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.207298] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944673, 'name': Rename_Task, 'duration_secs': 0.419441} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.207461] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.207756] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-675d7601-86de-48f4-91b4-6cd01bd0e257 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.215026] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1046.215026] env[68437]: value = "task-2944674" [ 1046.215026] env[68437]: _type = "Task" [ 1046.215026] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.222804] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944674, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.305373] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.338830] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1046.364074] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1046.364335] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.364492] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1046.364761] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.364959] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image pref 
0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1046.365182] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1046.365430] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1046.365622] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1046.365840] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1046.366027] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1046.366208] env[68437]: DEBUG nova.virt.hardware [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1046.367052] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b676a1f-e3dd-43f0-9e0d-f9e7014cbbbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.375112] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91af6685-5a85-4a92-a386-ba93ab11e252 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.379066] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.379340] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.379587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.379779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.379980] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.382039] env[68437]: INFO nova.compute.manager [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Terminating instance [ 1046.665963] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Successfully updated port: e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.703270] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.703875] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1046.706492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.890s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.707346] env[68437]: DEBUG nova.objects.instance [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lazy-loading 'resources' on Instance uuid bd3721bf-74fb-41b3-8090-1b370c0ea9fb {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.726550] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944674, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.869025] env[68437]: DEBUG nova.compute.manager [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Received event network-vif-plugged-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1046.869204] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.869495] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.869570] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.869760] env[68437]: DEBUG nova.compute.manager [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] No waiting events found dispatching network-vif-plugged-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1046.870015] env[68437]: WARNING nova.compute.manager [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Received unexpected event network-vif-plugged-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 for instance with vm_state building and task_state spawning. 
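Almost every operation above is bracketed by oslo.concurrency lockutils messages ("Acquiring lock ... by ...", "acquired ... :: waited Ns", '"released" ... :: held Ns'), emitted both by the lock() context manager (lockutils.py:313/316/334) and by the synchronized() decorator's wrapper (lockutils.py:405/410/424). A small sketch of the two forms follows; the lock names mirror ones in the log, while refresh_instance_network_cache() and instance_claim() are placeholder bodies.

    # Sketch of the locking pattern behind the acquire/release lines above.
    from oslo_concurrency import lockutils

    # Context-manager form: serializes a critical section on a named lock.
    with lockutils.lock('refresh_cache-3f82b137-81d5-4754-b222-3cefce0b2a10'):
        refresh_instance_network_cache()   # placeholder for the cache refresh

    # Decorator form: the whole function runs under the named lock, and the
    # waited/held durations are logged on entry and exit.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass   # placeholder for the resource-tracker claim logged above

Keying the lock names on instance UUIDs and on "compute_resources" is what keeps the concurrent tempest requests in this log (stop, detach_interface, terminate, claims) from interleaving on the same instance or on the host's resource accounting.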
[ 1046.870148] env[68437]: DEBUG nova.compute.manager [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Received event network-changed-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1046.870319] env[68437]: DEBUG nova.compute.manager [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Refreshing instance network info cache due to event network-changed-e35bd2c3-ec8e-4c14-90f7-e714e59882b2. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1046.870531] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Acquiring lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.870907] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Acquired lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.870907] env[68437]: DEBUG nova.network.neutron [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Refreshing network info cache for port e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1046.893693] env[68437]: DEBUG nova.compute.manager [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1046.893693] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1046.894336] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7831c140-65f4-473d-9d32-9ddbeed9d9bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.908709] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1046.908952] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b45ad073-f4e5-49aa-82c0-4370a5e2f801 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.978618] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1046.978894] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1046.979118] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1046.979396] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30c84d9c-2542-4c57-ab43-3be1d41dad4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.986572] env[68437]: DEBUG oslo_vmware.api [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1046.986572] env[68437]: value = "task-2944676" [ 1046.986572] env[68437]: _type = "Task" [ 1046.986572] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.994436] env[68437]: DEBUG oslo_vmware.api [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944676, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.168725] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.210317] env[68437]: DEBUG nova.compute.utils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1047.214424] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1047.214600] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1047.224851] env[68437]: DEBUG oslo_vmware.api [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944674, 'name': PowerOnVM_Task, 'duration_secs': 0.626157} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.225680] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.226030] env[68437]: INFO nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Took 11.23 seconds to spawn the instance on the hypervisor. 
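The destroy path for instance f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 above is: UnregisterVM (a synchronous call, no task object), then FileManager.DeleteDatastoreFile_Task on the instance's datastore directory, again driven through wait_for_task. A sketch of those two calls is below, reusing the session pattern from the earlier example; session, vm_ref and dc_ref are assumed to have been obtained already and are placeholders here.

    # Sketch of the unregister-then-delete-files destroy sequence logged above.
    # session, vm_ref and dc_ref are placeholders obtained elsewhere.

    # 1. Drop the VM from the vCenter inventory (synchronous, no task returned).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 2. Delete the instance's directory on the datastore via the FileManager.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28',
        datacenter=dc_ref)
    session.wait_for_task(task)   # the earlier delete (task-2944668) finished in ~0.27 s

The order mirrors the vmops._destroy_instance records above: unregister first, then remove the on-datastore contents, after which the compute manager deallocates the instance's Neutron ports.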
[ 1047.226087] env[68437]: DEBUG nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.226840] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ba1efa-7d32-45d0-8a19-5170fe842dd2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.257289] env[68437]: DEBUG nova.policy [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a0e66fd345044e92857d742c65f537ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36ec823128647758ca8047a5ebf1ae1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1047.373292] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.373550] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.373778] env[68437]: INFO nova.compute.manager [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Attaching volume 6e750883-5e0a-415f-9685-e2ba3a3826d6 to /dev/sdb [ 1047.406855] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb865c5-8b1d-44c2-a789-b4be76061a7a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.416469] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b15fd6-d1e8-4c35-a404-d0bf62ded01c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.419445] env[68437]: DEBUG nova.network.neutron [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1047.432217] env[68437]: DEBUG nova.virt.block_device [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Updating existing volume attachment record: 45e0a7d7-bdff-4f31-bfee-6ce73d8b04cb {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1047.464575] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cc2eaa-5d45-44ff-81da-dc5b6bc16ce4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.472245] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15487d2a-22ad-47e1-9fe6-53650a99a7fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.510392] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba06bc3-1d34-42ca-a658-1858e7df46b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.517924] env[68437]: DEBUG oslo_vmware.api [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944676, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255708} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.520144] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.520372] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1047.520551] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1047.520734] env[68437]: INFO nova.compute.manager [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1047.520994] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.521285] env[68437]: DEBUG nova.compute.manager [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1047.521389] env[68437]: DEBUG nova.network.neutron [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1047.523903] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de74f05-53d1-47aa-9cc1-1bc44753fc81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.538893] env[68437]: DEBUG nova.compute.provider_tree [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.575590] env[68437]: DEBUG nova.network.neutron [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.635352] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Successfully created port: 895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1047.715142] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1047.746566] env[68437]: INFO nova.compute.manager [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Took 23.46 seconds to build instance. 
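The "Waiting for function ... _deallocate_network_with_retries to return" entry above is emitted by oslo.service's looping-call machinery, which re-invokes a function on a fixed interval until it signals completion by raising `LoopingCallDone`. A rough sketch of that generic pattern follows, assuming oslo.service is installed; the `_deallocate_with_retries` body and its retry budget are illustrative only, not Nova's actual network-deallocation helper.

```python
from oslo_service import loopingcall

MAX_ATTEMPTS = 3
_state = {'attempt': 0}


def _deallocate_with_retries():
    """Illustrative retry body: pretend the first attempt fails, then succeed."""
    _state['attempt'] += 1
    try:
        if _state['attempt'] < 2:
            raise RuntimeError('transient network API error')
        # Real code would perform the actual cleanup call here.
        raise loopingcall.LoopingCallDone(retvalue=True)
    except RuntimeError:
        if _state['attempt'] >= MAX_ATTEMPTS:
            # Give up: report failure as the looping call's return value.
            raise loopingcall.LoopingCallDone(retvalue=False)
        # Returning normally lets the looping call run us again after `interval`.


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
succeeded = timer.start(interval=1).wait()
print('deallocated:', succeeded)
```

The log shows the eventlet backend of oslo.service doing this waiting; the public `loopingcall` interface used in the sketch is the same either way.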
[ 1047.860347] env[68437]: DEBUG nova.compute.manager [req-397ac33a-0019-41c1-a29a-c7246fceddb7 req-f10fa009-a422-42a6-9263-991873fbabfe service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Received event network-vif-deleted-e6e03f32-de4c-4405-9930-c4a70c9d560d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1047.860723] env[68437]: INFO nova.compute.manager [req-397ac33a-0019-41c1-a29a-c7246fceddb7 req-f10fa009-a422-42a6-9263-991873fbabfe service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Neutron deleted interface e6e03f32-de4c-4405-9930-c4a70c9d560d; detaching it from the instance and deleting it from the info cache [ 1047.860723] env[68437]: DEBUG nova.network.neutron [req-397ac33a-0019-41c1-a29a-c7246fceddb7 req-f10fa009-a422-42a6-9263-991873fbabfe service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.041695] env[68437]: DEBUG nova.scheduler.client.report [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.081951] env[68437]: DEBUG oslo_concurrency.lockutils [req-d7f5049d-e784-4c7c-ab52-83e4acd452c6 req-ceb6a41c-0e8f-410f-a832-956f561f8e3a service nova] Releasing lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.082411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.082623] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1048.250449] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e11946cf-c290-4c10-a716-c5acb8fd91ce tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.986s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.345719] env[68437]: DEBUG nova.network.neutron [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Updating instance_info_cache with network_info: [] {{(pid=68437) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.366057] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6453d57a-e8fb-458b-8704-44e095fed720 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.375790] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f790c3c-21da-491e-9b5d-ad17b57648a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.407492] env[68437]: DEBUG nova.compute.manager [req-397ac33a-0019-41c1-a29a-c7246fceddb7 req-f10fa009-a422-42a6-9263-991873fbabfe service nova] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Detach interface failed, port_id=e6e03f32-de4c-4405-9930-c4a70c9d560d, reason: Instance f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1048.546876] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.549196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.225s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.549458] env[68437]: DEBUG nova.objects.instance [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'resources' on Instance uuid 697d5011-fb4e-4542-851b-39953bbb293d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1048.571754] env[68437]: INFO nova.scheduler.client.report [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Deleted allocations for instance bd3721bf-74fb-41b3-8090-1b370c0ea9fb [ 1048.619712] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1048.725084] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1048.749932] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1048.750196] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1048.750358] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1048.750545] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1048.750692] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1048.750868] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1048.751077] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1048.751239] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1048.751404] 
env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1048.751566] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1048.751737] env[68437]: DEBUG nova.virt.hardware [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1048.752621] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba127c81-8da5-4aa0-9bb4-f8218bda5011 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.760910] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188062fb-5c87-45b7-a9f4-154b660b2a87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.765460] env[68437]: DEBUG nova.network.neutron [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating instance_info_cache with network_info: [{"id": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "address": "fa:16:3e:ae:33:4f", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape35bd2c3-ec", "ovs_interfaceid": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.775828] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.776069] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.776277] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.776460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.776630] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.778882] env[68437]: INFO nova.compute.manager [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Terminating instance [ 1048.846653] env[68437]: INFO nova.compute.manager [-] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Took 1.32 seconds to deallocate network for instance. 
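The recurring 'Acquiring lock ... by ...', 'Lock ... acquired by ... :: waited N s' and 'Lock ... "released" by ... :: held N s' entries (for example the per-instance lock and the companion "-events" lock taken just above before terminating 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3) are logged by oslo.concurrency's `lockutils` wrapper around the protected code. A small usage sketch, assuming oslo.concurrency is installed, is below; the `terminate` function and the lock names are illustrative, not Nova's code.

```python
import logging

from oslo_concurrency import lockutils

# Enable DEBUG so the lockutils acquire/release lines are visible.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('33cc7565-9cd0-47a7-afe2-ac3849ba7ac3')
def terminate(instance_uuid):
    """Critical section: only one thread may act on this instance at a time.

    While this runs, lockutils logs the 'acquired by ... :: waited' and
    '"released" by ... :: held' lines seen throughout this log.
    """
    return 'terminated %s' % instance_uuid


# The same primitive is also available as a context manager.
with lockutils.lock('compute_resources'):
    terminate('33cc7565-9cd0-47a7-afe2-ac3849ba7ac3')
```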
[ 1049.030482] env[68437]: DEBUG nova.compute.manager [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Received event network-vif-plugged-895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1049.030756] env[68437]: DEBUG oslo_concurrency.lockutils [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] Acquiring lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.031264] env[68437]: DEBUG oslo_concurrency.lockutils [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] Lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.031502] env[68437]: DEBUG oslo_concurrency.lockutils [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] Lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.031720] env[68437]: DEBUG nova.compute.manager [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] No waiting events found dispatching network-vif-plugged-895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1049.031937] env[68437]: WARNING nova.compute.manager [req-758aa387-d201-4e8c-b802-2644aff7fc33 req-dd1fa625-c54a-495d-9d45-789e9ffe59b8 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Received unexpected event network-vif-plugged-895256cb-13df-4810-9015-951c1dcea5a1 for instance with vm_state building and task_state spawning. 
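The network-vif-plugged handling above (take the per-instance "-events" lock, pop a waiting event, and warn "Received unexpected event ..." when nothing is waiting) follows a register-then-signal pattern: the spawn path registers the events it expects before plugging the VIF, and the Neutron-triggered external event either wakes that waiter or is logged as unexpected. The sketch below models that pattern with a dictionary of threading.Event objects under one lock; it is a simplified illustration, not Nova's InstanceEvents class, and every name in it is hypothetical.

```python
import logging
import threading

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class ExternalEvents:
    """Toy model of register/deliver for per-instance external events."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Called by the spawn path before triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def deliver(self, instance_uuid, event_name):
        """Called when the external notification (e.g. from Neutron) arrives."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)
            return
        waiter.set()


events = ExternalEvents()
uuid = '832697dc-53ec-406d-b698-d10766bd8f9d'

# Spawn path: register the expected event, kick off the work, then wait.
waiter = events.prepare(uuid, 'network-vif-plugged')
events.deliver(uuid, 'network-vif-plugged')      # notification path
print('plugged:', waiter.wait(timeout=5))

# An event nobody registered for only logs a warning, as in the lines above.
events.deliver(uuid, 'network-vif-deleted')
```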
[ 1049.078126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-416cadef-e29a-4ef7-b494-7ab1b65d9c9b tempest-ServerPasswordTestJSON-1800658669 tempest-ServerPasswordTestJSON-1800658669-project-member] Lock "bd3721bf-74fb-41b3-8090-1b370c0ea9fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.970s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.115410] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Successfully updated port: 895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.236684] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34297672-8b13-4ec3-ac7d-fe12b14acc6d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.244185] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9269507b-1d04-4d9b-83c9-08825c651471 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.274394] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.274731] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance network_info: |[{"id": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "address": "fa:16:3e:ae:33:4f", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape35bd2c3-ec", "ovs_interfaceid": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.275343] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] 
[instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:33:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e35bd2c3-ec8e-4c14-90f7-e714e59882b2', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.283029] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating folder: Project (73e8290afeb84bf3976cfa22d3452ca7). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.283531] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bc7718-a465-4b99-96ff-00a5df390c23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.286512] env[68437]: DEBUG nova.compute.manager [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1049.286717] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.286943] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d426aa1f-09da-4ccd-b3ca-328e54ed28f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.290153] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600e9e6c-1521-44ed-8bc0-bc2027202a09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.299564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df2c76d-6528-4668-aa0b-ac8a029cc036 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.303643] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.304796] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-677bd43f-8057-47f2-951f-4dba6b00f44e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.306161] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created folder: Project 
(73e8290afeb84bf3976cfa22d3452ca7) in parent group-v590848. [ 1049.306349] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating folder: Instances. Parent ref: group-v591077. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.306851] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8fa01b4-0257-4233-a606-403d76fc943a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.316623] env[68437]: DEBUG nova.compute.provider_tree [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.319615] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1049.319615] env[68437]: value = "task-2944681" [ 1049.319615] env[68437]: _type = "Task" [ 1049.319615] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.327891] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944681, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.330741] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created folder: Instances in parent group-v591077. [ 1049.330995] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.331217] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.331437] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be263bf4-e321-40ad-876c-66fc456ddc4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.350705] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.350705] env[68437]: value = "task-2944683" [ 1049.350705] env[68437]: _type = "Task" [ 1049.350705] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.354373] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.360772] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944683, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.619052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.619222] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.619378] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1049.819830] env[68437]: DEBUG nova.scheduler.client.report [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.832089] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944681, 'name': PowerOffVM_Task, 'duration_secs': 0.246958} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.832232] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.832404] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.832647] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8312fa0-e120-4880-9843-01f2a959146c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.860016] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944683, 'name': CreateVM_Task, 'duration_secs': 0.347215} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.860284] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.860979] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.861164] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.861481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1049.861741] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09cba2b5-6194-439b-af62-901d5974e542 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.866072] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1049.866072] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52daf39e-8a43-f0a2-90cd-5475affe5a6b" [ 1049.866072] env[68437]: _type = "Task" [ 1049.866072] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.874030] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52daf39e-8a43-f0a2-90cd-5475affe5a6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.890292] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.890494] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.890672] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Deleting the datastore file [datastore1] 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.890926] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c7a59e7-3e2b-4232-b6c2-e6b57ba2ddde {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.905890] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for the task: (returnval){ [ 1049.905890] env[68437]: value = "task-2944685" [ 1049.905890] env[68437]: _type = "Task" [ 1049.905890] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.915159] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944685, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.153680] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1050.295709] env[68437]: DEBUG nova.network.neutron [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Updating instance_info_cache with network_info: [{"id": "895256cb-13df-4810-9015-951c1dcea5a1", "address": "fa:16:3e:80:be:c8", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895256cb-13", "ovs_interfaceid": "895256cb-13df-4810-9015-951c1dcea5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.327035] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.327784] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.387s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.328475] env[68437]: DEBUG nova.objects.instance [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'resources' on Instance uuid 3f82b137-81d5-4754-b222-3cefce0b2a10 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.353885] env[68437]: INFO nova.scheduler.client.report [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocations for instance 697d5011-fb4e-4542-851b-39953bbb293d [ 1050.377576] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52daf39e-8a43-f0a2-90cd-5475affe5a6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009078} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.381020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.381020] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.381020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.381020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.381020] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.381020] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fd2c7ee-e182-4374-831a-6e2581312042 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.388254] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.388624] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.389486] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a226e55e-bb58-4e3f-9458-4178e40b061f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.397788] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1050.397788] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525978e2-d8d2-b3d5-d844-a7a943cb1102" [ 1050.397788] env[68437]: _type = "Task" [ 1050.397788] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.404869] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525978e2-d8d2-b3d5-d844-a7a943cb1102, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.415522] env[68437]: DEBUG oslo_vmware.api [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Task: {'id': task-2944685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209752} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.415522] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.415522] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.415522] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.415522] env[68437]: INFO nova.compute.manager [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1050.415868] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.415868] env[68437]: DEBUG nova.compute.manager [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.415972] env[68437]: DEBUG nova.network.neutron [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1050.801118] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.801118] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Instance network_info: |[{"id": "895256cb-13df-4810-9015-951c1dcea5a1", "address": "fa:16:3e:80:be:c8", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895256cb-13", "ovs_interfaceid": "895256cb-13df-4810-9015-951c1dcea5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1050.801118] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:be:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '895256cb-13df-4810-9015-951c1dcea5a1', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1050.814044] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.814044] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1050.814044] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d521b88e-aa56-4fd6-ba57-45fb9bfccf06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.845665] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1050.845665] env[68437]: value = "task-2944687" [ 1050.845665] env[68437]: _type = "Task" [ 1050.845665] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.856522] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944687, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.863016] env[68437]: DEBUG oslo_concurrency.lockutils [None req-959e549f-1388-48e4-9130-54cabfff2942 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "697d5011-fb4e-4542-851b-39953bbb293d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.969s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.909169] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525978e2-d8d2-b3d5-d844-a7a943cb1102, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.910030] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb79dd9-50d1-4d92-aa57-15db82acc321 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.916137] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1050.916137] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5210d4f4-37b0-bf8f-56e9-b4fb9a80763c" [ 1050.916137] env[68437]: _type = "Task" [ 1050.916137] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.924396] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5210d4f4-37b0-bf8f-56e9-b4fb9a80763c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.063029] env[68437]: DEBUG nova.compute.manager [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Received event network-changed-895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1051.063173] env[68437]: DEBUG nova.compute.manager [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Refreshing instance network info cache due to event network-changed-895256cb-13df-4810-9015-951c1dcea5a1. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1051.063389] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] Acquiring lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.063530] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] Acquired lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.063723] env[68437]: DEBUG nova.network.neutron [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Refreshing network info cache for port 895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1051.071839] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d352e8c-e8d5-4bbb-99f1-a0727e6703c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.080631] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1681e013-776b-4161-99c9-aca7bd39c737 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.113072] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4968693-a055-449e-aca0-df13f51a5199 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.121505] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc9e039-ddbe-45e2-8354-5b8e19713bc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.135266] env[68437]: DEBUG nova.compute.provider_tree [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.169570] env[68437]: DEBUG nova.network.neutron [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1051.357361] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944687, 'name': CreateVM_Task, 'duration_secs': 0.419119} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.357529] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1051.358247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.358411] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.358914] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1051.358988] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3ae4c41-fe38-4cec-bb30-74c21af21bdb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.363522] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1051.363522] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5288bad7-2089-47bb-a7a7-6c802548126c" [ 1051.363522] env[68437]: _type = "Task" [ 1051.363522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.370993] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5288bad7-2089-47bb-a7a7-6c802548126c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.426368] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5210d4f4-37b0-bf8f-56e9-b4fb9a80763c, 'name': SearchDatastore_Task, 'duration_secs': 0.010001} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.426975] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.426975] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ee0450b5-66ce-41ed-9f4f-7ffa7b46f769/ee0450b5-66ce-41ed-9f4f-7ffa7b46f769.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1051.427149] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dea5394-43cd-45e9-9c1e-3d02ab016b18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.433403] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1051.433403] env[68437]: value = "task-2944688" [ 1051.433403] env[68437]: _type = "Task" [ 1051.433403] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.441264] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.638435] env[68437]: DEBUG nova.scheduler.client.report [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.672716] env[68437]: INFO nova.compute.manager [-] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Took 1.26 seconds to deallocate network for instance. [ 1051.802701] env[68437]: DEBUG nova.network.neutron [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Updated VIF entry in instance network info cache for port 895256cb-13df-4810-9015-951c1dcea5a1. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1051.803112] env[68437]: DEBUG nova.network.neutron [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Updating instance_info_cache with network_info: [{"id": "895256cb-13df-4810-9015-951c1dcea5a1", "address": "fa:16:3e:80:be:c8", "network": {"id": "d3fa09df-f4c8-48f9-8228-27bd86388fae", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1054986550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a36ec823128647758ca8047a5ebf1ae1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap895256cb-13", "ovs_interfaceid": "895256cb-13df-4810-9015-951c1dcea5a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.874028] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5288bad7-2089-47bb-a7a7-6c802548126c, 'name': SearchDatastore_Task, 'duration_secs': 0.00976} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.874366] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.874606] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1051.874850] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.874997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.875195] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.875464] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60f20a77-81a2-4138-b4ee-601515d0e04a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.883645] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.883822] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1051.884541] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82a62d34-b15c-4264-9b52-eed38aea0e08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.889698] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1051.889698] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528260c4-02cc-5369-1ca2-685b78a9faf5" [ 1051.889698] env[68437]: _type = "Task" [ 1051.889698] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.898339] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528260c4-02cc-5369-1ca2-685b78a9faf5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.942243] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461921} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.942479] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] ee0450b5-66ce-41ed-9f4f-7ffa7b46f769/ee0450b5-66ce-41ed-9f4f-7ffa7b46f769.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.942710] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.942954] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64d1a90e-7640-40d2-b00e-7500115aa053 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.950112] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1051.950112] env[68437]: value = "task-2944689" [ 1051.950112] env[68437]: _type = "Task" [ 1051.950112] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.957898] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.145409] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.150046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 6.679s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.150251] env[68437]: DEBUG nova.objects.instance [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1052.152835] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.153060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.164171] env[68437]: INFO nova.scheduler.client.report [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted allocations for instance 3f82b137-81d5-4754-b222-3cefce0b2a10 [ 1052.182289] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.306581] env[68437]: DEBUG oslo_concurrency.lockutils [req-d9147fa9-c82f-421f-90e7-af4135836135 
req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] Releasing lock "refresh_cache-832697dc-53ec-406d-b698-d10766bd8f9d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.306847] env[68437]: DEBUG nova.compute.manager [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Received event network-vif-deleted-2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1052.307036] env[68437]: INFO nova.compute.manager [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Neutron deleted interface 2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21; detaching it from the instance and deleting it from the info cache [ 1052.307218] env[68437]: DEBUG nova.network.neutron [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.400116] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528260c4-02cc-5369-1ca2-685b78a9faf5, 'name': SearchDatastore_Task, 'duration_secs': 0.011599} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.400872] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d13ccac-37c7-4314-813c-ccabc632dc50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.405768] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1052.405768] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5232f7d1-0d39-98f3-b8d1-2431f7bc4152" [ 1052.405768] env[68437]: _type = "Task" [ 1052.405768] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.414427] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5232f7d1-0d39-98f3-b8d1-2431f7bc4152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.458881] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068871} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.459159] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.460013] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d629cc-3ebb-4e38-9391-ce724cf01ed2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.481106] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] ee0450b5-66ce-41ed-9f4f-7ffa7b46f769/ee0450b5-66ce-41ed-9f4f-7ffa7b46f769.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.481593] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75e85678-d297-4916-961d-1eace245d438 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.496101] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1052.496334] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591076', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'name': 'volume-6e750883-5e0a-415f-9685-e2ba3a3826d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75a9fb57-5796-4853-b429-6e8ea7aba1de', 'attached_at': '', 'detached_at': '', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'serial': '6e750883-5e0a-415f-9685-e2ba3a3826d6'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1052.497108] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085de4de-26bb-4c33-aa65-c5840d6b91f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.514237] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9f91b7-f0b4-4a69-83e2-b79639055ede {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.516821] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1052.516821] env[68437]: value = "task-2944690" [ 1052.516821] env[68437]: _type = "Task" [ 1052.516821] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.539672] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] volume-6e750883-5e0a-415f-9685-e2ba3a3826d6/volume-6e750883-5e0a-415f-9685-e2ba3a3826d6.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.540450] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7f275d9-4916-41c7-b747-43d394993c62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.557038] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944690, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.562058] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1052.562058] env[68437]: value = "task-2944691" [ 1052.562058] env[68437]: _type = "Task" [ 1052.562058] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.569634] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944691, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.659093] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1052.671234] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c485487-1553-4b39-b21b-e66f798fc833 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "3f82b137-81d5-4754-b222-3cefce0b2a10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.560s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.809987] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dab63318-8053-445e-8499-c04acc838348 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.818511] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41b7cc7-0bd8-43c0-9224-49db3e1c0564 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.849536] env[68437]: DEBUG nova.compute.manager [req-d9147fa9-c82f-421f-90e7-af4135836135 req-e3eb6c2d-4545-41bc-b3f1-441fc5d8ffe7 service nova] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Detach interface failed, port_id=2c449a27-60e1-4e4c-aac4-7ba7a9cc1f21, reason: Instance 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1052.915707] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5232f7d1-0d39-98f3-b8d1-2431f7bc4152, 'name': SearchDatastore_Task, 'duration_secs': 0.009545} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.916028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.916269] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 832697dc-53ec-406d-b698-d10766bd8f9d/832697dc-53ec-406d-b698-d10766bd8f9d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1052.916520] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b1da9f1-ddda-40f1-abe5-df1ebdb98b96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.928675] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1052.928675] env[68437]: value = "task-2944692" [ 1052.928675] env[68437]: _type = "Task" [ 1052.928675] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.936395] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.026628] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944690, 'name': ReconfigVM_Task, 'duration_secs': 0.328109} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.026928] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfigured VM instance instance-00000053 to attach disk [datastore2] ee0450b5-66ce-41ed-9f4f-7ffa7b46f769/ee0450b5-66ce-41ed-9f4f-7ffa7b46f769.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.027592] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-753ae6ac-7db3-460b-b5ae-497d9a84a51e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.033678] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1053.033678] env[68437]: value = "task-2944693" [ 1053.033678] env[68437]: _type = "Task" [ 1053.033678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.041275] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944693, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.073204] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944691, 'name': ReconfigVM_Task, 'duration_secs': 0.386708} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.073433] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfigured VM instance instance-00000051 to attach disk [datastore2] volume-6e750883-5e0a-415f-9685-e2ba3a3826d6/volume-6e750883-5e0a-415f-9685-e2ba3a3826d6.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.078124] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e8db81-53a3-4198-9bb0-7ec0e72b776f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.093179] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1053.093179] env[68437]: value = "task-2944694" [ 1053.093179] env[68437]: _type = "Task" [ 1053.093179] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.105139] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944694, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.164026] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bea12727-ac97-48b1-948d-d613483d06f8 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.171740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.609s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.173607] env[68437]: INFO nova.compute.claims [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1053.189384] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.438768] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45931} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.439049] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 832697dc-53ec-406d-b698-d10766bd8f9d/832697dc-53ec-406d-b698-d10766bd8f9d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1053.439272] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1053.439531] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c879623-18ec-4d00-ae53-ada3a70f5f40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.445174] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1053.445174] env[68437]: value = "task-2944695" [ 1053.445174] env[68437]: _type = "Task" [ 1053.445174] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.452667] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944695, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.543825] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944693, 'name': Rename_Task, 'duration_secs': 0.238428} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.544291] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.544847] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f18e15ad-0b38-4e73-a7a6-2bc49e27acf7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.550488] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1053.550488] env[68437]: value = "task-2944696" [ 1053.550488] env[68437]: _type = "Task" [ 1053.550488] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.557868] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944696, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.602559] env[68437]: DEBUG oslo_vmware.api [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944694, 'name': ReconfigVM_Task, 'duration_secs': 0.180134} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.602884] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591076', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'name': 'volume-6e750883-5e0a-415f-9685-e2ba3a3826d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75a9fb57-5796-4853-b429-6e8ea7aba1de', 'attached_at': '', 'detached_at': '', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'serial': '6e750883-5e0a-415f-9685-e2ba3a3826d6'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1053.956039] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944695, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063094} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.956039] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.956704] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8776398-495c-4aef-9ed6-5bbbf73faf5c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.980240] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 832697dc-53ec-406d-b698-d10766bd8f9d/832697dc-53ec-406d-b698-d10766bd8f9d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.980548] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c19bc98d-7bfc-4db8-9c15-5ae671a75a68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.999540] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1053.999540] env[68437]: value = "task-2944697" [ 1053.999540] env[68437]: _type = "Task" [ 1053.999540] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.007030] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944697, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.060899] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944696, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.397103] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13083551-52af-4f67-90de-8d5e9bcf96ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.404104] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac72a3e2-ed97-444b-a46a-d066ab1c795f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.435816] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c40412-bcc6-4804-a18b-2f88fa05f7fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.443119] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2515e005-70df-402e-b326-9305c90fe8da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.456123] env[68437]: DEBUG nova.compute.provider_tree [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.509733] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944697, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.560681] env[68437]: DEBUG oslo_vmware.api [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944696, 'name': PowerOnVM_Task, 'duration_secs': 0.849816} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.560984] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.561208] env[68437]: INFO nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Took 8.22 seconds to spawn the instance on the hypervisor. 
[ 1054.561389] env[68437]: DEBUG nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.562179] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0746f1-fd01-45f9-9584-ea4a948286f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.651911] env[68437]: DEBUG nova.objects.instance [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'flavor' on Instance uuid 75a9fb57-5796-4853-b429-6e8ea7aba1de {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.769104] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.874727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "e2143e07-8c8d-4008-bb73-29aae91baee7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.874955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.960062] env[68437]: DEBUG nova.scheduler.client.report [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1055.009505] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944697, 'name': ReconfigVM_Task, 'duration_secs': 0.559533} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.009780] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 832697dc-53ec-406d-b698-d10766bd8f9d/832697dc-53ec-406d-b698-d10766bd8f9d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.010425] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-819d0ebd-c028-47f6-a5ac-7b439089f302 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.016575] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1055.016575] env[68437]: value = "task-2944698" [ 1055.016575] env[68437]: _type = "Task" [ 1055.016575] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.030101] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944698, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.081207] env[68437]: INFO nova.compute.manager [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Took 15.36 seconds to build instance. 
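[editor's note] The "Inventory has not changed for provider ... based on inventory data" entries above carry the resource-provider inventory that Placement schedules against. Effective capacity per resource class is (total - reserved) * allocation_ratio (min_unit/max_unit/step_size only constrain individual allocation sizes). The snippet below just applies that arithmetic to the values logged for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05.

```python
# Capacity arithmetic for the inventory dict logged above (trimmed to the
# fields the calculation needs).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: schedulable capacity = {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```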
[ 1055.156907] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4bfbcef4-6c16-4555-ac3f-4a161691ffd8 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.783s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.157824] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.389s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.158068] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.158281] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.158447] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.160548] env[68437]: INFO nova.compute.manager [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Terminating instance [ 1055.377926] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1055.464227] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.465025] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1055.467349] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.630s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.469102] env[68437]: INFO nova.compute.claims [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.525870] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944698, 'name': Rename_Task, 'duration_secs': 0.402289} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.526184] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.526443] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a0b0600-0865-4564-9bb9-c7fd346a9827 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.533172] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1055.533172] env[68437]: value = "task-2944699" [ 1055.533172] env[68437]: _type = "Task" [ 1055.533172] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.541813] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944699, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.581973] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8da63c2c-9b4b-479b-b2e8-789d00721360 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.874s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.663947] env[68437]: DEBUG nova.compute.manager [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.664237] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.664625] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3de557b-62cf-4450-97fa-826b5b9dc131 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.671930] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1055.671930] env[68437]: value = "task-2944700" [ 1055.671930] env[68437]: _type = "Task" [ 1055.671930] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.684163] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944700, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.913618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.959393] env[68437]: DEBUG nova.compute.manager [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Received event network-changed-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1055.959682] env[68437]: DEBUG nova.compute.manager [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Refreshing instance network info cache due to event network-changed-e35bd2c3-ec8e-4c14-90f7-e714e59882b2. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1055.959946] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] Acquiring lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.960171] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] Acquired lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.960409] env[68437]: DEBUG nova.network.neutron [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Refreshing network info cache for port e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1055.974046] env[68437]: DEBUG nova.compute.utils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.976937] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.977147] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1056.034919] env[68437]: DEBUG nova.policy [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc898250d37545019deed31238cbb4e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a252ebf0cf6948f6ae2fc93b773cd15e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1056.046472] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944699, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.181374] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944700, 'name': PowerOffVM_Task, 'duration_secs': 0.191257} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.181663] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.181867] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1056.182082] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591076', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'name': 'volume-6e750883-5e0a-415f-9685-e2ba3a3826d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75a9fb57-5796-4853-b429-6e8ea7aba1de', 'attached_at': '', 'detached_at': '', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'serial': '6e750883-5e0a-415f-9685-e2ba3a3826d6'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1056.182860] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31217b60-1d37-4d12-8f3e-e0a644a01f30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.204843] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c403da4-300a-46b1-8f59-ca69298b1b46 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.212019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc9fe79-9960-4739-8c11-d5d45eb94fc3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.233797] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa20ebc-e22a-4b19-bbaa-de257b311937 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.250625] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-6e750883-5e0a-415f-9685-e2ba3a3826d6/volume-6e750883-5e0a-415f-9685-e2ba3a3826d6.vmdk. No consolidation needed. {{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1056.257351] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1056.257351] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c8e644a-8bd6-4630-b3d1-bf41a05f9eca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.275892] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1056.275892] env[68437]: value = "task-2944701" [ 1056.275892] env[68437]: _type = "Task" [ 1056.275892] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.285060] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944701, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.301272] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Successfully created port: 9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.478167] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.553381] env[68437]: DEBUG oslo_vmware.api [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944699, 'name': PowerOnVM_Task, 'duration_secs': 0.583083} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.553763] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.553995] env[68437]: INFO nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Took 7.83 seconds to spawn the instance on the hypervisor. [ 1056.554215] env[68437]: DEBUG nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.555058] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7941e2f9-06a3-43a0-867f-240f8fcaf9b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.757769] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e0243-c11e-4eb8-89f8-6bd6c0e49215 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.765956] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3087ec9f-d6ae-4e35-9450-49f94b04f06b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.799928] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d2968b-2078-49cb-97e4-ad81c11751b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.809988] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a90ca6a-b517-4329-9465-a2dbae51e1e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.813845] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944701, 'name': ReconfigVM_Task, 'duration_secs': 0.341734} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.814132] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1056.819145] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9139a2f-ade4-4d55-a37c-02eb0999b4cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.837639] env[68437]: DEBUG nova.compute.provider_tree [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.844432] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1056.844432] env[68437]: value = "task-2944702" [ 1056.844432] env[68437]: _type = "Task" [ 1056.844432] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.852193] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944702, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.048258] env[68437]: DEBUG nova.network.neutron [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updated VIF entry in instance network info cache for port e35bd2c3-ec8e-4c14-90f7-e714e59882b2. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1057.049850] env[68437]: DEBUG nova.network.neutron [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating instance_info_cache with network_info: [{"id": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "address": "fa:16:3e:ae:33:4f", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape35bd2c3-ec", "ovs_interfaceid": "e35bd2c3-ec8e-4c14-90f7-e714e59882b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.078940] env[68437]: INFO nova.compute.manager [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Took 16.62 seconds to build instance. [ 1057.341433] env[68437]: DEBUG nova.scheduler.client.report [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.357831] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944702, 'name': ReconfigVM_Task, 'duration_secs': 0.188101} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.358338] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591076', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'name': 'volume-6e750883-5e0a-415f-9685-e2ba3a3826d6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '75a9fb57-5796-4853-b429-6e8ea7aba1de', 'attached_at': '', 'detached_at': '', 'volume_id': '6e750883-5e0a-415f-9685-e2ba3a3826d6', 'serial': '6e750883-5e0a-415f-9685-e2ba3a3826d6'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1057.358722] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.359518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f25c571-a5d3-4f22-bd4a-05d051d142bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.367423] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.367842] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83f8ea8b-81f8-4e4b-91fc-5c2f481d247b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.454050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.454050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.454201] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore1] 75a9fb57-5796-4853-b429-6e8ea7aba1de {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.454438] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a44aecde-3303-42f8-94b0-db1500f9c380 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.460758] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1057.460758] env[68437]: value = "task-2944704" [ 1057.460758] env[68437]: _type = "Task" [ 1057.460758] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.468805] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.489246] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1057.515470] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1057.515771] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.515971] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1057.516185] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.516348] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image 
pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1057.516550] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1057.516902] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1057.517098] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1057.517277] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1057.517494] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1057.517606] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1057.518463] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c30fc5a-1fce-43ae-935d-0ebb65a0b523 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.526501] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a4bdae-36de-4378-b8e4-42f876f5bc2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.551774] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e510269-1b3c-4623-94c0-b6f0664d3763 req-1ecf191e-6618-4299-bfaa-1f30daf92eab service nova] Releasing lock "refresh_cache-ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.581745] env[68437]: DEBUG oslo_concurrency.lockutils [None req-13f92977-bf7f-40b9-b594-15e054e33ef7 tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.129s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.688282] env[68437]: DEBUG nova.compute.manager [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Received event network-vif-plugged-9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1057.688514] env[68437]: DEBUG oslo_concurrency.lockutils [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] Acquiring lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.688727] env[68437]: DEBUG oslo_concurrency.lockutils [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.688910] env[68437]: DEBUG oslo_concurrency.lockutils [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.689071] env[68437]: DEBUG nova.compute.manager [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] No waiting events found dispatching network-vif-plugged-9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1057.689250] env[68437]: WARNING nova.compute.manager [req-0dbd985b-ff1e-4b87-87e3-f9a5821e3422 req-5c1a08fa-8ace-4e0c-93ad-05f9576292cc service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Received unexpected event network-vif-plugged-9af0d88e-b67d-45be-baec-62c7b8a6f80b for instance with vm_state building and task_state spawning. [ 1057.780615] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Successfully updated port: 9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.846539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.847171] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1057.849624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.968s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.851303] env[68437]: INFO nova.compute.claims [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.970438] env[68437]: DEBUG oslo_vmware.api [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184525} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.970736] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.970927] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.971124] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.971302] env[68437]: INFO nova.compute.manager [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1057.971551] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.971744] env[68437]: DEBUG nova.compute.manager [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.971841] env[68437]: DEBUG nova.network.neutron [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1058.215302] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "832697dc-53ec-406d-b698-d10766bd8f9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.215646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.215759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.215960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.216137] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.218051] env[68437]: INFO nova.compute.manager [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Terminating instance [ 1058.285907] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.286130] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.286399] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1058.356032] env[68437]: DEBUG nova.compute.utils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1058.359136] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.359136] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1058.393765] env[68437]: DEBUG nova.policy [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc898250d37545019deed31238cbb4e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a252ebf0cf6948f6ae2fc93b773cd15e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.627358] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Successfully created port: 583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.696396] env[68437]: DEBUG nova.network.neutron [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.722537] env[68437]: DEBUG nova.compute.manager [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 
832697dc-53ec-406d-b698-d10766bd8f9d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.722537] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.722708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2096535c-d449-4b89-b3c3-f6135a3d7b58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.732624] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.732624] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b5693c1-278f-4a79-bb0b-f418996946c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.739446] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1058.739446] env[68437]: value = "task-2944705" [ 1058.739446] env[68437]: _type = "Task" [ 1058.739446] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.750341] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.820285] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1058.860477] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1059.010823] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Updating instance_info_cache with network_info: [{"id": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "address": "fa:16:3e:21:98:be", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9af0d88e-b6", "ovs_interfaceid": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.078090] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d761d94-0227-4a3c-ac97-788a96f7330c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.085537] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c0eab4-048a-4b89-9385-6935b630a8a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.116820] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a4feb7-a53c-4f0b-87bd-7d2e1cc2e48f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.123889] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48406938-5416-42bc-b0a8-c61f1c3714d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.137209] env[68437]: DEBUG nova.compute.provider_tree [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1059.199512] env[68437]: INFO nova.compute.manager [-] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Took 1.23 seconds to deallocate network for instance. 
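(The PowerOffVM_Task, DeleteDatastoreFile_Task and CreateVM_Task entries in this section all follow the same oslo.vmware pattern: the vmwareapi driver invokes a vSphere method through the shared API session, gets a Task reference back immediately, and then blocks in wait_for_task(), which polls the task until vCenter reports success or failure, producing the recurring "progress is 0%" lines. Below is a minimal standalone sketch of that pattern, not the Nova driver code itself; the vCenter host, credentials and VM managed-object ID are placeholders, not values taken from this log.)

    # Sketch only: placeholder host, credentials and moid.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc1.example.test', 'svc-user', 'secret',   # placeholder connection details
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for the VM and ask vCenter to power it off.
    # The call returns a Task reference without waiting for the power-off to finish.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the task at task_poll_interval, logging progress the
    # same way as the "_poll_task ... progress is 0%" entries above, and raises
    # if the task ends in an error state.
    task_info = session.wait_for_task(task)
    print(task_info.state)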
[ 1059.251984] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944705, 'name': PowerOffVM_Task, 'duration_secs': 0.225539} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.252324] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.252469] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.252725] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ab83efc-92a6-4503-ac16-a40d973c95e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.312781] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.313010] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.313191] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleting the datastore file [datastore2] 832697dc-53ec-406d-b698-d10766bd8f9d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.313445] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7739c8f-ead3-42c9-a73d-8dd7d7d4db44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.319789] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for the task: (returnval){ [ 1059.319789] env[68437]: value = "task-2944707" [ 1059.319789] env[68437]: _type = "Task" [ 1059.319789] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.327511] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944707, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.514166] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.514552] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Instance network_info: |[{"id": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "address": "fa:16:3e:21:98:be", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9af0d88e-b6", "ovs_interfaceid": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1059.515331] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:98:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9af0d88e-b67d-45be-baec-62c7b8a6f80b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.523288] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Creating folder: Project (a252ebf0cf6948f6ae2fc93b773cd15e). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1059.523567] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c02cab53-3ae8-4a33-8535-6d3afbc32918 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.535294] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Created folder: Project (a252ebf0cf6948f6ae2fc93b773cd15e) in parent group-v590848. [ 1059.535474] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Creating folder: Instances. Parent ref: group-v591081. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1059.535692] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5336694f-aa70-4838-b051-f7bdc6d4a156 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.544935] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Created folder: Instances in parent group-v591081. [ 1059.545161] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.545340] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.545525] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41f5e53c-1661-45f4-bf4a-6058386a40f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.562513] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.562513] env[68437]: value = "task-2944710" [ 1059.562513] env[68437]: _type = "Task" [ 1059.562513] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.569150] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944710, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.640393] env[68437]: DEBUG nova.scheduler.client.report [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1059.717277] env[68437]: DEBUG nova.compute.manager [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Received event network-changed-9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1059.717951] env[68437]: DEBUG nova.compute.manager [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Refreshing instance network info cache due to event network-changed-9af0d88e-b67d-45be-baec-62c7b8a6f80b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1059.717951] env[68437]: DEBUG oslo_concurrency.lockutils [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] Acquiring lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.717951] env[68437]: DEBUG oslo_concurrency.lockutils [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] Acquired lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.718285] env[68437]: DEBUG nova.network.neutron [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Refreshing network info cache for port 9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1059.752520] env[68437]: INFO nova.compute.manager [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Took 0.55 seconds to detach 1 volumes for instance. [ 1059.829087] env[68437]: DEBUG oslo_vmware.api [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Task: {'id': task-2944707, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139425} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.829311] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.829501] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.829675] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.829858] env[68437]: INFO nova.compute.manager [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1059.830110] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.830312] env[68437]: DEBUG nova.compute.manager [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.830404] env[68437]: DEBUG nova.network.neutron [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1059.871958] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1059.896709] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.896961] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.897177] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.897314] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.897470] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.897619] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.897824] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.897982] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.898168] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.898329] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.898501] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.899375] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0946fe-812b-458c-afa3-4e40cc672cb2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.906783] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4db34e-688c-4680-aabf-9c64c665cd74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.063258] env[68437]: DEBUG nova.compute.manager [req-71c1fd16-b8a6-4d37-b87b-60dbdf2f1fa5 req-6c0ce56c-0d1e-4de7-8af7-f523fca23e3e service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Received event network-vif-deleted-895256cb-13df-4810-9015-951c1dcea5a1 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1060.063409] env[68437]: INFO nova.compute.manager [req-71c1fd16-b8a6-4d37-b87b-60dbdf2f1fa5 req-6c0ce56c-0d1e-4de7-8af7-f523fca23e3e service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Neutron deleted interface 895256cb-13df-4810-9015-951c1dcea5a1; detaching it from the instance and deleting it from the info cache [ 1060.063524] env[68437]: DEBUG nova.network.neutron [req-71c1fd16-b8a6-4d37-b87b-60dbdf2f1fa5 req-6c0ce56c-0d1e-4de7-8af7-f523fca23e3e service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.074524] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944710, 'name': CreateVM_Task, 'duration_secs': 0.351696} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.075135] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1060.075821] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.075985] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.076310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1060.077130] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Successfully updated port: 583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.078266] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ad3dc5b-9986-4316-8f89-5dd8e697421b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.084219] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1060.084219] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52650e0e-3c62-9b73-999b-fd3253c13e88" [ 1060.084219] env[68437]: _type = "Task" [ 1060.084219] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.093709] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52650e0e-3c62-9b73-999b-fd3253c13e88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.146016] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.146600] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1060.149140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.795s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.149601] env[68437]: DEBUG nova.objects.instance [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'resources' on Instance uuid f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.259232] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.401515] env[68437]: DEBUG nova.network.neutron [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Updated VIF entry in instance network info cache for port 9af0d88e-b67d-45be-baec-62c7b8a6f80b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1060.401893] env[68437]: DEBUG nova.network.neutron [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Updating instance_info_cache with network_info: [{"id": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "address": "fa:16:3e:21:98:be", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9af0d88e-b6", "ovs_interfaceid": "9af0d88e-b67d-45be-baec-62c7b8a6f80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.546566] env[68437]: DEBUG nova.network.neutron [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.566191] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0c26a7e-05e8-45f9-b4e7-686c9d26c511 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.575129] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e15383-7f91-4810-ba96-fa9791b2cf00 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.587484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.587589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.587731] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Building network info cache for instance {{(pid=68437) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2068}} [ 1060.597161] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52650e0e-3c62-9b73-999b-fd3253c13e88, 'name': SearchDatastore_Task, 'duration_secs': 0.010446} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.597869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.598121] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.598333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.598481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.598658] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.599097] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e39f4db3-0c9e-4a1a-87db-740d81ca9f23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.609922] env[68437]: DEBUG nova.compute.manager [req-71c1fd16-b8a6-4d37-b87b-60dbdf2f1fa5 req-6c0ce56c-0d1e-4de7-8af7-f523fca23e3e service nova] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Detach interface failed, port_id=895256cb-13df-4810-9015-951c1dcea5a1, reason: Instance 832697dc-53ec-406d-b698-d10766bd8f9d could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1060.610999] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.611188] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.611832] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0587befe-37c1-470e-a9b9-41b55a2785a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.616270] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1060.616270] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5252628d-8418-1439-9026-bc5792279b28" [ 1060.616270] env[68437]: _type = "Task" [ 1060.616270] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.623352] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5252628d-8418-1439-9026-bc5792279b28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.651948] env[68437]: DEBUG nova.compute.utils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1060.656065] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1060.656242] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1060.704729] env[68437]: DEBUG nova.policy [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc898250d37545019deed31238cbb4e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a252ebf0cf6948f6ae2fc93b773cd15e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1060.864921] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac360ea-c97b-4add-b365-243bbd63626c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.874416] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da115a7e-f692-47ad-8889-6aa74b19aaf4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.904462] env[68437]: DEBUG oslo_concurrency.lockutils [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] Releasing lock "refresh_cache-987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.904740] env[68437]: DEBUG nova.compute.manager [req-bab26f45-8b47-45b9-9b52-5c6bd8cf4ff2 req-390a4c91-b0f5-40ab-8812-e7a16f7d6c34 service nova] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Received event network-vif-deleted-a1c03134-f2fd-4e15-a710-171032761276 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1060.905798] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1cbce5-4a2e-4eef-b1e1-f01c0d158511 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.912574] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375ad734-dfe4-4e93-ae39-f4e9ad95d58c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.926900] env[68437]: DEBUG nova.compute.provider_tree [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.935599] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 
tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Successfully created port: b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1061.048705] env[68437]: INFO nova.compute.manager [-] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Took 1.22 seconds to deallocate network for instance. [ 1061.121934] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1061.130038] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5252628d-8418-1439-9026-bc5792279b28, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.130038] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4be20cd0-fb9f-4117-a14b-71462970bf70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.134571] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1061.134571] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b40910-2692-b293-345c-17cd05746ebb" [ 1061.134571] env[68437]: _type = "Task" [ 1061.134571] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.141201] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b40910-2692-b293-345c-17cd05746ebb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.156492] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1061.356544] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Updating instance_info_cache with network_info: [{"id": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "address": "fa:16:3e:ce:36:83", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583b37d4-09", "ovs_interfaceid": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.430432] env[68437]: DEBUG nova.scheduler.client.report [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.555613] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.644159] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b40910-2692-b293-345c-17cd05746ebb, 'name': SearchDatastore_Task, 'duration_secs': 0.010166} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.644495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.644783] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439/987ed4b2-5c7a-4c7b-a7a5-66b4e515b439.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.645085] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e8b576f-69c8-4941-8103-14270e18d786 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.651370] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1061.651370] env[68437]: value = "task-2944711" [ 1061.651370] env[68437]: _type = "Task" [ 1061.651370] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.659079] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944711, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.747086] env[68437]: DEBUG nova.compute.manager [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Received event network-vif-plugged-583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1061.747273] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Acquiring lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.747629] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.747780] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.747969] env[68437]: DEBUG nova.compute.manager [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] No waiting events found dispatching network-vif-plugged-583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1061.748161] env[68437]: WARNING nova.compute.manager [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Received unexpected event network-vif-plugged-583b37d4-09c0-4e4a-a343-e7f5898a9038 for instance with vm_state building and task_state spawning. [ 1061.748317] env[68437]: DEBUG nova.compute.manager [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Received event network-changed-583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1061.748475] env[68437]: DEBUG nova.compute.manager [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Refreshing instance network info cache due to event network-changed-583b37d4-09c0-4e4a-a343-e7f5898a9038. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1061.748636] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Acquiring lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.858857] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.859211] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Instance network_info: |[{"id": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "address": "fa:16:3e:ce:36:83", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583b37d4-09", "ovs_interfaceid": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1061.859540] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Acquired lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.859727] env[68437]: DEBUG nova.network.neutron [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Refreshing network info cache for port 583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1061.861061] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:36:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '583b37d4-09c0-4e4a-a343-e7f5898a9038', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.870191] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.871482] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.871800] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae2d5593-5883-4d15-a1e3-d0352d21e4ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.894508] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.894508] env[68437]: value = "task-2944712" [ 1061.894508] env[68437]: _type = "Task" [ 1061.894508] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.904145] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944712, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.935797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.786s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.938759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.756s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.939813] env[68437]: DEBUG nova.objects.instance [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lazy-loading 'resources' on Instance uuid 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1061.967382] env[68437]: INFO nova.scheduler.client.report [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocations for instance f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28 [ 1062.161368] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944711, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454563} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.161684] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439/987ed4b2-5c7a-4c7b-a7a5-66b4e515b439.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.161914] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.162208] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d34d2b4d-f180-46f0-af9b-a8f922552bfa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.164977] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1062.171276] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1062.171276] env[68437]: value = "task-2944713" [ 1062.171276] env[68437]: _type = "Task" [ 1062.171276] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.180535] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.191672] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1062.191965] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.192173] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1062.192414] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.192538] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1062.192690] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1062.192933] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1062.193158] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1062.193368] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1062.193567] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1062.193790] env[68437]: DEBUG nova.virt.hardware [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1062.194803] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8dc4e7-b8d1-4c45-a256-8fb44e95e38c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.202432] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de55fdc6-4c45-4afd-a1c4-a2ed7c913903 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.401749] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Successfully updated port: b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1062.406338] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944712, 'name': CreateVM_Task, 'duration_secs': 0.385349} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.406747] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.407403] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.407525] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.407833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.408152] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9cb4097-e432-4489-96d6-f5125c751d18 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.412591] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1062.412591] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e5124c-1f88-87a6-db1c-05acbef0b345" [ 1062.412591] env[68437]: _type = "Task" [ 1062.412591] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.421364] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e5124c-1f88-87a6-db1c-05acbef0b345, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.479236] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6aab162f-9422-4139-a6ad-ca0dc4da2974 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.100s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.688065] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06734} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.688846] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8399fd0b-7857-4d16-acc6-a85a1bce3344 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.692100] env[68437]: DEBUG nova.network.neutron [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Updated VIF entry in instance network info cache for port 583b37d4-09c0-4e4a-a343-e7f5898a9038. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1062.692472] env[68437]: DEBUG nova.network.neutron [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Updating instance_info_cache with network_info: [{"id": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "address": "fa:16:3e:ce:36:83", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap583b37d4-09", "ovs_interfaceid": "583b37d4-09c0-4e4a-a343-e7f5898a9038", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.693673] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 
1062.694940] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f737c434-622e-4890-b389-82d2feb42f74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.711751] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae0dadd-0373-4353-89f6-81fdb44143f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.723283] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439/987ed4b2-5c7a-4c7b-a7a5-66b4e515b439.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.723885] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf5697b6-2837-4053-a560-1065e067e906 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.768640] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bbd737-d0e2-4f7e-8e7c-7249077976c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.771754] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1062.771754] env[68437]: value = "task-2944714" [ 1062.771754] env[68437]: _type = "Task" [ 1062.771754] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.779708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691bd3c5-9bf5-4615-84e7-38f004a4450f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.787129] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944714, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.799040] env[68437]: DEBUG nova.compute.provider_tree [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.908320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.908405] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.908592] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1062.923765] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e5124c-1f88-87a6-db1c-05acbef0b345, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.924074] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.924572] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.924654] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.924733] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.924915] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.925196] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81ce5848-c02a-4030-b272-e7bda949c6c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.934378] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.934582] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.935354] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a9f102-494d-48de-9280-91f230f9b6fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.940399] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1062.940399] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5234525e-95e7-c4b9-2548-556a080741f4" [ 1062.940399] env[68437]: _type = "Task" [ 1062.940399] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.948621] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5234525e-95e7-c4b9-2548-556a080741f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.196193] env[68437]: DEBUG oslo_concurrency.lockutils [req-0085924e-c580-464b-94fc-419810ced19a req-215f2d8f-73dd-4b9e-ab2c-ef8a8543b241 service nova] Releasing lock "refresh_cache-b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.283815] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944714, 'name': ReconfigVM_Task, 'duration_secs': 0.319614} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.284508] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439/987ed4b2-5c7a-4c7b-a7a5-66b4e515b439.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.286082] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a5ac122-265e-4349-98f2-608a17da818a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.294883] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1063.294883] env[68437]: value = "task-2944715" [ 1063.294883] env[68437]: _type = "Task" [ 1063.294883] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.305022] env[68437]: DEBUG nova.scheduler.client.report [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.318204] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944715, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.443167] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1063.451889] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5234525e-95e7-c4b9-2548-556a080741f4, 'name': SearchDatastore_Task, 'duration_secs': 0.012785} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.452852] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7413c2ac-1a62-49eb-98d4-efec8ed9c3e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.462398] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1063.462398] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5290ad77-dcde-8772-6bee-ce332da57677" [ 1063.462398] env[68437]: _type = "Task" [ 1063.462398] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.469836] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290ad77-dcde-8772-6bee-ce332da57677, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.583900] env[68437]: DEBUG nova.network.neutron [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Updating instance_info_cache with network_info: [{"id": "b9828641-08ba-49a4-8810-3049e52c44a2", "address": "fa:16:3e:d0:b0:fd", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9828641-08", "ovs_interfaceid": "b9828641-08ba-49a4-8810-3049e52c44a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.809950] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944715, 'name': Rename_Task, 'duration_secs': 0.140283} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.810427] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.810804] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f277a4c-7fc7-4cb0-9d0c-09083bba4705 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.818050] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1063.818050] env[68437]: value = "task-2944716" [ 1063.818050] env[68437]: _type = "Task" [ 1063.818050] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.823574] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.828398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.638s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.829397] env[68437]: INFO nova.compute.claims [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1063.838894] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944716, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.841292] env[68437]: DEBUG nova.compute.manager [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Received event network-vif-plugged-b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.841563] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Acquiring lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.841907] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.842144] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.842354] env[68437]: DEBUG nova.compute.manager [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] No waiting events found dispatching network-vif-plugged-b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.842636] env[68437]: WARNING nova.compute.manager [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Received unexpected event network-vif-plugged-b9828641-08ba-49a4-8810-3049e52c44a2 for instance with vm_state building and task_state spawning. [ 1063.842762] env[68437]: DEBUG nova.compute.manager [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Received event network-changed-b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.843492] env[68437]: DEBUG nova.compute.manager [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Refreshing instance network info cache due to event network-changed-b9828641-08ba-49a4-8810-3049e52c44a2. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1063.843492] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Acquiring lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.849802] env[68437]: INFO nova.scheduler.client.report [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Deleted allocations for instance 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3 [ 1063.972250] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5290ad77-dcde-8772-6bee-ce332da57677, 'name': SearchDatastore_Task, 'duration_secs': 0.010206} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.972546] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.972808] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b81a414d-51bf-4f08-b0d3-a19a7aa4efe5/b81a414d-51bf-4f08-b0d3-a19a7aa4efe5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.973088] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd93cd3c-9030-474c-a84a-e981b892c85a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.980311] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1063.980311] env[68437]: value = "task-2944717" [ 1063.980311] env[68437]: _type = "Task" [ 1063.980311] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.988174] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944717, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.086714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.087029] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Instance network_info: |[{"id": "b9828641-08ba-49a4-8810-3049e52c44a2", "address": "fa:16:3e:d0:b0:fd", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9828641-08", "ovs_interfaceid": "b9828641-08ba-49a4-8810-3049e52c44a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1064.087384] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Acquired lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.087659] env[68437]: DEBUG nova.network.neutron [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Refreshing network info cache for port b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1064.088914] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:b0:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a555680e-4721-4509-97e4-ced9dc17c13e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9828641-08ba-49a4-8810-3049e52c44a2', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1064.096879] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d 
tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1064.097890] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1064.098166] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d503166f-9d83-4556-b9a0-5ebe960f33fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.118590] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1064.118590] env[68437]: value = "task-2944718" [ 1064.118590] env[68437]: _type = "Task" [ 1064.118590] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.126316] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944718, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.331027] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944716, 'name': PowerOnVM_Task, 'duration_secs': 0.470681} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.331027] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.331027] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Took 6.84 seconds to spawn the instance on the hypervisor. 
[ 1064.331027] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.331744] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b209430d-ea07-4e3a-8462-69910f77f3ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.358287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9a87a858-cd3c-4ecb-979f-87b037cd2c58 tempest-InstanceActionsV221TestJSON-2037702812 tempest-InstanceActionsV221TestJSON-2037702812-project-member] Lock "33cc7565-9cd0-47a7-afe2-ac3849ba7ac3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.582s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.491274] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944717, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.518055] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.518334] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.629031] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944718, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.836074] env[68437]: DEBUG nova.network.neutron [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Updated VIF entry in instance network info cache for port b9828641-08ba-49a4-8810-3049e52c44a2. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1064.836509] env[68437]: DEBUG nova.network.neutron [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Updating instance_info_cache with network_info: [{"id": "b9828641-08ba-49a4-8810-3049e52c44a2", "address": "fa:16:3e:d0:b0:fd", "network": {"id": "e3c051d1-46ee-4b1b-9d6f-14899b039018", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1165851796-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a252ebf0cf6948f6ae2fc93b773cd15e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a555680e-4721-4509-97e4-ced9dc17c13e", "external-id": "nsx-vlan-transportzone-4", "segmentation_id": 4, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9828641-08", "ovs_interfaceid": "b9828641-08ba-49a4-8810-3049e52c44a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.854346] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Took 19.31 seconds to build instance. [ 1064.990268] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944717, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525812} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.992796] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b81a414d-51bf-4f08-b0d3-a19a7aa4efe5/b81a414d-51bf-4f08-b0d3-a19a7aa4efe5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.993030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.994323] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04631f22-fbf1-4054-b86f-5abbc0e708f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.001839] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1065.001839] env[68437]: value = "task-2944719" [ 1065.001839] env[68437]: _type = "Task" [ 1065.001839] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.012161] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.020683] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1065.080405] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03884371-9a38-4bd0-be99-093b72f94dac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.091153] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850bd785-f765-4b5c-9c84-20aa1fcd7b7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.126528] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84a550e-25ba-49de-8703-99d222f94bf0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.133748] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944718, 'name': CreateVM_Task, 'duration_secs': 0.575247} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.135781] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1065.136507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.136697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.137025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1065.138200] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94e6a96-1330-4f6a-a49d-b37e41a92384 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.141735] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-271d6aff-8f53-48b6-b10c-ea1a63b28b1b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.153529] env[68437]: DEBUG nova.compute.provider_tree [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.156464] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1065.156464] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5216c4e2-28a6-dcde-6caa-d56b91c5a053" [ 1065.156464] env[68437]: _type = "Task" [ 1065.156464] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.164949] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5216c4e2-28a6-dcde-6caa-d56b91c5a053, 'name': SearchDatastore_Task, 'duration_secs': 0.009283} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.165251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.165486] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1065.165714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.165861] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.166046] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1065.166359] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c63384f8-5d17-48e3-8b12-26334203945d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.173912] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1065.174104] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1065.174798] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08979adb-9330-45a0-b22f-defb550245c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.179284] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1065.179284] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5276af2e-2ac2-ed7d-5343-96ae89f7ca8a" [ 1065.179284] env[68437]: _type = "Task" [ 1065.179284] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.186985] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276af2e-2ac2-ed7d-5343-96ae89f7ca8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.343380] env[68437]: DEBUG oslo_concurrency.lockutils [req-80aab900-709f-441f-8095-ee0a7e2c71bb req-5b5ab6e1-3cbd-47d4-8720-b987ca5b70f7 service nova] Releasing lock "refresh_cache-27429c12-ce0a-4e21-ac1b-6862a8063a9f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.356287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.819s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.516501] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.516858] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.517787] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebaccff-8d4f-4dfd-bf6e-ce3a6d4a17a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.545903] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] b81a414d-51bf-4f08-b0d3-a19a7aa4efe5/b81a414d-51bf-4f08-b0d3-a19a7aa4efe5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.546695] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee4f5c6e-7c1e-4497-b933-85b05fc55576 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.561698] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.567727] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1065.567727] env[68437]: value = "task-2944720" [ 1065.567727] env[68437]: _type = "Task" [ 1065.567727] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.575231] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944720, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.656474] env[68437]: DEBUG nova.scheduler.client.report [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.689788] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276af2e-2ac2-ed7d-5343-96ae89f7ca8a, 'name': SearchDatastore_Task, 'duration_secs': 0.00833} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.690602] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4c4b80e-249a-44c6-8f50-677af5003a78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.695685] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1065.695685] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f91c81-0e02-82a8-5eb6-f077afeba98c" [ 1065.695685] env[68437]: _type = "Task" [ 1065.695685] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.704871] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f91c81-0e02-82a8-5eb6-f077afeba98c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.077952] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944720, 'name': ReconfigVM_Task, 'duration_secs': 0.315831} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.078328] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Reconfigured VM instance instance-00000056 to attach disk [datastore1] b81a414d-51bf-4f08-b0d3-a19a7aa4efe5/b81a414d-51bf-4f08-b0d3-a19a7aa4efe5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.081018] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0533df50-4486-4994-b39a-6d61de052134 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.085142] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1066.085142] env[68437]: value = "task-2944721" [ 1066.085142] env[68437]: _type = "Task" [ 1066.085142] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.092694] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944721, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.162612] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.163173] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1066.165831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.253s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.167294] env[68437]: INFO nova.compute.claims [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.208207] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f91c81-0e02-82a8-5eb6-f077afeba98c, 'name': SearchDatastore_Task, 'duration_secs': 0.011629} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.208477] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.208802] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 27429c12-ce0a-4e21-ac1b-6862a8063a9f/27429c12-ce0a-4e21-ac1b-6862a8063a9f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1066.209118] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3d8d75e-a784-4a7b-9fd1-306ecf24f443 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.216304] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1066.216304] env[68437]: value = "task-2944722" [ 1066.216304] env[68437]: _type = "Task" [ 1066.216304] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.229405] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944722, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.597117] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944721, 'name': Rename_Task, 'duration_secs': 0.14516} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.597468] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.597679] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d53a47a-5b2b-4bb7-8b5c-67593f7ab2d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.605076] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1066.605076] env[68437]: value = "task-2944723" [ 1066.605076] env[68437]: _type = "Task" [ 1066.605076] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.615911] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.676683] env[68437]: DEBUG nova.compute.utils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1066.678077] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1066.726368] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493482} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.726646] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 27429c12-ce0a-4e21-ac1b-6862a8063a9f/27429c12-ce0a-4e21-ac1b-6862a8063a9f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1066.726863] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1066.727133] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-132bb117-2050-4a3a-92ef-8dd0fc73f068 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.733131] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1066.733131] env[68437]: value = "task-2944724" [ 1066.733131] env[68437]: _type = "Task" [ 1066.733131] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.741430] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944724, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.122531] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944723, 'name': PowerOnVM_Task, 'duration_secs': 0.492111} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.122531] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.122531] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Took 7.25 seconds to spawn the instance on the hypervisor. 
[ 1067.122531] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.123056] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9201af7-4d31-4cc7-bc2d-1094c6442401 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.183684] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1067.245820] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064162} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.246111] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1067.246897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70eb6cbd-db0d-4de6-b196-47b12ee6727f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.275236] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 27429c12-ce0a-4e21-ac1b-6862a8063a9f/27429c12-ce0a-4e21-ac1b-6862a8063a9f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1067.278537] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3f1b799-1954-4700-9bf7-6e6d4120038a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.302154] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1067.302154] env[68437]: value = "task-2944725" [ 1067.302154] env[68437]: _type = "Task" [ 1067.302154] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.312138] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.476866] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc23c0-c926-4868-81cb-6fb3abaad8ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.492501] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6868be3a-631c-4a2e-8447-cec7fe351f7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.525364] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d4bcdb-a84c-47d6-8627-7a988c4c70dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.533467] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d662116a-5fd6-4e06-8565-1c474d895ead {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.548414] env[68437]: DEBUG nova.compute.provider_tree [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.639935] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Took 21.82 seconds to build instance. [ 1067.812140] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.052326] env[68437]: DEBUG nova.scheduler.client.report [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.057369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.057525] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.142225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.568s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.197263] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1068.225840] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1068.226436] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1068.226608] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1068.226794] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1068.226942] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1068.227106] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1068.227329] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1068.227488] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1068.227687] env[68437]: DEBUG 
nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1068.227867] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1068.228206] env[68437]: DEBUG nova.virt.hardware [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1068.229834] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942c42fc-2cac-42a8-b29b-d1f13c9d08e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.239563] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90764d3-57c3-45f3-95a6-d2b94c9742b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.254137] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.260351] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Creating folder: Project (a96fdf1265704cdbbafc321c1e18d54f). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.260667] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98327657-025d-44a9-8bda-5bd974a2790a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.272073] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Created folder: Project (a96fdf1265704cdbbafc321c1e18d54f) in parent group-v590848. [ 1068.272279] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Creating folder: Instances. Parent ref: group-v591086. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.272515] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b20e1c4f-26d7-4d87-a901-c479327acba8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.282497] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Created folder: Instances in parent group-v591086. [ 1068.282732] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.282930] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.283155] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1eaf87c1-01d1-4818-8650-c8b29a8933dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.300648] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.300648] env[68437]: value = "task-2944728" [ 1068.300648] env[68437]: _type = "Task" [ 1068.300648] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.311638] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944728, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.316239] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.560937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.561589] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1068.566822] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.308s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.567324] env[68437]: DEBUG nova.objects.instance [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'resources' on Instance uuid 75a9fb57-5796-4853-b429-6e8ea7aba1de {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.568726] env[68437]: INFO nova.compute.manager [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Detaching volume 4f284da8-83f9-4c81-9fc5-d241f2cbc05b [ 1068.618547] env[68437]: INFO nova.virt.block_device [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Attempting to driver detach volume 4f284da8-83f9-4c81-9fc5-d241f2cbc05b from mountpoint /dev/sdb [ 1068.618894] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1068.618970] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591059', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'name': 'volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a01364f9-e30d-4140-ae41-1e7c4aaa2251', 'attached_at': '', 'detached_at': '', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'serial': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1068.620393] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572e213f-3434-4de3-b747-64b1deccd809 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.646695] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cc3a21-6d38-41e7-b256-e53e21e72931 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.653845] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ac4a81-e46c-406a-910e-ca8ecba878b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.676836] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04bf20b-6098-4f5f-9067-e840a367a9ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.693331] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] The volume has not been displaced from its original location: [datastore2] volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b/volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1068.700074] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1068.700074] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e0ebfdb-3bde-4001-b352-2c06ad18141d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.718044] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1068.718044] env[68437]: value = "task-2944729" [ 1068.718044] env[68437]: _type = "Task" [ 1068.718044] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.728865] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944729, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.814016] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944728, 'name': CreateVM_Task, 'duration_secs': 0.326169} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.817450] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.817952] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944725, 'name': ReconfigVM_Task, 'duration_secs': 1.140889} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.818513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.818513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.819051] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.819515] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 27429c12-ce0a-4e21-ac1b-6862a8063a9f/27429c12-ce0a-4e21-ac1b-6862a8063a9f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1068.819934] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f744951-c199-4aae-84da-3fc9774016af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.821525] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61c2889f-12c2-49ac-863b-bd3add7d44e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.826792] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1068.826792] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d1d66a-27a1-fec1-201a-a533e154d31e" [ 1068.826792] env[68437]: _type = "Task" [ 1068.826792] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.831307] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1068.831307] env[68437]: value = "task-2944730" [ 1068.831307] env[68437]: _type = "Task" [ 1068.831307] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.843022] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d1d66a-27a1-fec1-201a-a533e154d31e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.843436] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944730, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.075970] env[68437]: DEBUG nova.compute.utils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1069.080878] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1069.081968] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1069.146961] env[68437]: DEBUG nova.policy [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1069.228911] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944729, 'name': ReconfigVM_Task, 'duration_secs': 0.310481} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.229244] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1069.236451] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-874de79b-b872-41a0-bd0e-726834bec0b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.252448] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1069.252448] env[68437]: value = "task-2944731" [ 1069.252448] env[68437]: _type = "Task" [ 1069.252448] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.262421] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944731, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.313462] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a09b745-93b6-43df-98c4-0ed2b86c9f9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.320668] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792b24d9-884e-4b62-b5c8-50010c83525a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.358819] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298b67fc-dc6b-411e-bd04-4a14aedac4d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.364770] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d1d66a-27a1-fec1-201a-a533e154d31e, 'name': SearchDatastore_Task, 'duration_secs': 0.013997} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.365735] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.365979] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.366241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.366396] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.367161] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.369592] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4de12c62-6170-45ea-801a-0951323f88fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.371336] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944730, 'name': Rename_Task, 'duration_secs': 0.146337} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.373399] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.373973] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71f78710-a40a-4b41-b632-98c7bd1ec1fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.376488] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b2cabb-7751-4a47-bdc1-4db1d1227a3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.381665] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.381778] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.383057] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e73375ba-d10a-4d9e-9d5e-b1d532c870f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.393580] env[68437]: DEBUG nova.compute.provider_tree [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.397106] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1069.397106] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d52a7-a711-29d5-592a-7fc83f5f1a3e" [ 1069.397106] env[68437]: _type = "Task" [ 1069.397106] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.397361] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1069.397361] env[68437]: value = "task-2944732" [ 1069.397361] env[68437]: _type = "Task" [ 1069.397361] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.410114] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523d52a7-a711-29d5-592a-7fc83f5f1a3e, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.413068] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944732, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.413317] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe0121d6-c1f8-4055-9aa9-ec48ee88b2d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.418682] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1069.418682] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521e3145-16eb-080f-732a-30a3efb49ec5" [ 1069.418682] env[68437]: _type = "Task" [ 1069.418682] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.426974] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521e3145-16eb-080f-732a-30a3efb49ec5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.584189] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1069.597622] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Successfully created port: a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1069.762457] env[68437]: DEBUG oslo_vmware.api [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944731, 'name': ReconfigVM_Task, 'duration_secs': 0.175092} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.762928] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591059', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'name': 'volume-4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a01364f9-e30d-4140-ae41-1e7c4aaa2251', 'attached_at': '', 'detached_at': '', 'volume_id': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b', 'serial': '4f284da8-83f9-4c81-9fc5-d241f2cbc05b'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1069.902497] env[68437]: DEBUG nova.scheduler.client.report [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.914195] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.930428] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521e3145-16eb-080f-732a-30a3efb49ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.009412} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.930748] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.930975] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.931247] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9980caeb-8eba-442d-ba8f-36be7370a162 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.943767] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1069.943767] env[68437]: value = "task-2944733" [ 1069.943767] env[68437]: _type = "Task" [ 1069.943767] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.954561] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944733, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.315357] env[68437]: DEBUG nova.objects.instance [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'flavor' on Instance uuid a01364f9-e30d-4140-ae41-1e7c4aaa2251 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.407392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.410119] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.855s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.410679] env[68437]: DEBUG nova.objects.instance [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lazy-loading 'resources' on Instance uuid 832697dc-53ec-406d-b698-d10766bd8f9d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.418628] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944732, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.428067] env[68437]: INFO nova.scheduler.client.report [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 75a9fb57-5796-4853-b429-6e8ea7aba1de [ 1070.455752] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46327} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.456195] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.456558] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.457375] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08ae05b8-ca2b-4ad3-ba70-51a8e21cce82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.466594] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1070.466594] env[68437]: value = "task-2944734" [ 1070.466594] env[68437]: _type = "Task" [ 1070.466594] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.476460] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944734, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.494541] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.494797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.597843] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1070.624147] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1070.624551] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.624749] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1070.624985] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.625180] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1070.625366] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1070.625586] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1070.625745] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1070.625945] env[68437]: DEBUG 
nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1070.626136] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1070.626347] env[68437]: DEBUG nova.virt.hardware [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1070.627562] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecd1a24-d19a-40fe-b640-efb72842bed1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.636730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a75369-da64-4172-95ac-db52b7cb2741 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.915899] env[68437]: DEBUG oslo_vmware.api [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944732, 'name': PowerOnVM_Task, 'duration_secs': 1.421569} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.916421] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.916639] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 1070.916830] env[68437]: DEBUG nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.917645] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9549b9-725e-4866-9a9b-571dff077e2b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.950297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e4ba599-851d-4cbd-ae08-4df84f374154 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "75a9fb57-5796-4853-b429-6e8ea7aba1de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.788s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.983199] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944734, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073241} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.984374] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.984374] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f78f3c-ef7c-4aac-9baf-1dc3cabf61be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.997425] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.008198] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.012088] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00bfd9ca-3b08-4ec5-ae43-ac6a78c2286f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.035546] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1071.035546] env[68437]: value = "task-2944735" [ 1071.035546] env[68437]: _type = "Task" [ 1071.035546] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.046940] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944735, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.231740] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55968001-fd1f-466a-b9ef-62da071bbc7a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.240765] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d72ce95-147c-4e1c-a455-2a0acbff5a83 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.276368] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34d3a4f-96da-4263-93c0-baa3a6471f6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.285860] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31ed380-e33a-4661-9c99-64d73d6fa2bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.294238] env[68437]: DEBUG nova.compute.manager [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-vif-plugged-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1071.294470] env[68437]: DEBUG oslo_concurrency.lockutils [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] Acquiring lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.294735] env[68437]: DEBUG oslo_concurrency.lockutils [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.294924] env[68437]: DEBUG oslo_concurrency.lockutils [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.299245] env[68437]: DEBUG nova.compute.manager [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] No waiting events found dispatching network-vif-plugged-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1071.299546] env[68437]: WARNING nova.compute.manager [req-358f9965-3741-4d1f-a726-7fcbbbea789e req-d31cf1af-8a55-4283-a633-ee228ce4f149 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received unexpected event network-vif-plugged-a072479e-e965-4e09-a378-229474b176e6 for instance with vm_state building and task_state spawning. [ 1071.321388] env[68437]: DEBUG nova.compute.provider_tree [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.323484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69f242e8-8dbe-450f-b59c-b7e448405de4 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.266s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.443964] env[68437]: INFO nova.compute.manager [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Took 25.57 seconds to build instance. 
[ 1071.444663] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Successfully updated port: a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1071.538028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.548885] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944735, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.825344] env[68437]: DEBUG nova.scheduler.client.report [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.828692] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.828920] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.829136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.829346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 
tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.829517] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.831668] env[68437]: INFO nova.compute.manager [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Terminating instance [ 1071.944662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4a880b0f-109e-4d99-af21-9ce1c143f88d tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.086s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.947337] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.947473] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.947617] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1072.049488] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944735, 'name': ReconfigVM_Task, 'duration_secs': 0.92425} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.049642] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Reconfigured VM instance instance-00000058 to attach disk [datastore2] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.050247] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a351a1b1-f16d-4db0-a872-13256b2d6bc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.058225] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1072.058225] env[68437]: value = "task-2944736" [ 1072.058225] env[68437]: _type = "Task" [ 1072.058225] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.067686] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944736, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.333067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.923s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.335476] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.774s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.338113] env[68437]: INFO nova.compute.claims [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1072.341563] env[68437]: DEBUG nova.compute.manager [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1072.341799] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1072.342923] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cdc7ef-a28e-4259-a1f6-cb8abe21e83b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.352861] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.353146] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b679da5b-846e-44d0-b184-5192a13217f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.356145] env[68437]: INFO nova.scheduler.client.report [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Deleted allocations for instance 832697dc-53ec-406d-b698-d10766bd8f9d [ 1072.361703] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1072.361703] env[68437]: value = "task-2944737" [ 1072.361703] env[68437]: _type = "Task" [ 1072.361703] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.373712] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.490680] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1072.576024] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944736, 'name': Rename_Task, 'duration_secs': 0.194687} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.576024] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.576024] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-383b1a48-5a14-48f3-be4a-730fafecf7fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.582867] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1072.582867] env[68437]: value = "task-2944738" [ 1072.582867] env[68437]: _type = "Task" [ 1072.582867] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.592604] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.708262] env[68437]: DEBUG nova.network.neutron [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.864055] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e2e07207-8954-4391-85f7-c9421fafa8ea tempest-ServerDiskConfigTestJSON-1875337862 tempest-ServerDiskConfigTestJSON-1875337862-project-member] Lock "832697dc-53ec-406d-b698-d10766bd8f9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.648s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.872616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.873404] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.877792] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944737, 'name': PowerOffVM_Task, 'duration_secs': 0.41445} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.878907] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.878907] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.879181] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f52714c2-1445-48cf-be31-a5a12667d7cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.978078] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.978423] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.978505] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleting the datastore file [datastore1] a01364f9-e30d-4140-ae41-1e7c4aaa2251 {{(pid=68437) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.978775] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a95e7357-847a-4ec5-a41e-90f16af32206 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.988146] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1072.988146] env[68437]: value = "task-2944740" [ 1072.988146] env[68437]: _type = "Task" [ 1072.988146] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.999320] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944740, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.093865] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944738, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.137832] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.138310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.138475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.138713] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.138937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 
tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.141231] env[68437]: INFO nova.compute.manager [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Terminating instance [ 1073.209668] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.210042] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Instance network_info: |[{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1073.210478] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:03:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a072479e-e965-4e09-a378-229474b176e6', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.219053] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.219606] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1073.219843] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd16cb2b-a780-4664-9dbf-03c2f6d8c8ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.243398] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.243398] env[68437]: value = "task-2944741" [ 1073.243398] env[68437]: _type = "Task" [ 1073.243398] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.252139] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944741, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.325582] env[68437]: DEBUG nova.compute.manager [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-changed-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1073.325582] env[68437]: DEBUG nova.compute.manager [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing instance network info cache due to event network-changed-a072479e-e965-4e09-a378-229474b176e6. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1073.328330] env[68437]: DEBUG oslo_concurrency.lockutils [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] Acquiring lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.328330] env[68437]: DEBUG oslo_concurrency.lockutils [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] Acquired lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.328330] env[68437]: DEBUG nova.network.neutron [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing network info cache for port a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1073.379267] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1073.503498] env[68437]: DEBUG oslo_vmware.api [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172745} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.503867] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.504146] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.504419] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.504622] env[68437]: INFO nova.compute.manager [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1073.504936] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.505231] env[68437]: DEBUG nova.compute.manager [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1073.505341] env[68437]: DEBUG nova.network.neutron [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1073.602976] env[68437]: DEBUG oslo_vmware.api [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944738, 'name': PowerOnVM_Task, 'duration_secs': 0.995633} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.607462] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.607788] env[68437]: INFO nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Took 5.41 seconds to spawn the instance on the hypervisor. [ 1073.608366] env[68437]: DEBUG nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.610131] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d780044-c662-438f-a6e2-a3161e1f61de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.645044] env[68437]: DEBUG nova.compute.manager [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1073.645309] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.647525] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3b7b0d-4c0a-4ee2-9385-401378a05e63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.656043] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.656638] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a0a1437-ac66-4bcc-b38e-789e46047409 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.664952] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1073.664952] env[68437]: value = "task-2944742" [ 1073.664952] env[68437]: _type = "Task" [ 1073.664952] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.677705] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.692585] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc47fca3-d1cd-48f8-8393-072faf04dc62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.702018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e2ef49-4bb7-4954-9d20-1c0199e8ed08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.740365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ddbbfb-1a2c-477b-be55-30ca3990cf1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.751922] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b8f19b-8662-4928-ab7b-ea064a691b17 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.762658] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944741, 'name': CreateVM_Task, 'duration_secs': 0.426897} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.763431] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1073.764470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.764842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.765085] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1073.765383] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45959cc8-2f99-4bb6-a50e-156e9371211f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1073.777457] env[68437]: DEBUG nova.compute.provider_tree [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.784495] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1073.784495] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bf1605-7bbd-c6c2-5268-a3cca9951513" [ 1073.784495] env[68437]: _type = "Task" [ 1073.784495] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.793417] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bf1605-7bbd-c6c2-5268-a3cca9951513, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.905939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.082430] env[68437]: DEBUG nova.network.neutron [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updated VIF entry in instance network info cache for port a072479e-e965-4e09-a378-229474b176e6. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1074.082804] env[68437]: DEBUG nova.network.neutron [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.129549] env[68437]: INFO nova.compute.manager [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Took 20.96 seconds to build instance. [ 1074.177760] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944742, 'name': PowerOffVM_Task, 'duration_secs': 0.390812} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.178060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.178237] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.178503] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a217cce-29de-406c-89b7-40f714f62590 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.281936] env[68437]: DEBUG nova.scheduler.client.report [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1074.285335] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.285563] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.285745] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleting the datastore file [datastore1] 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.289627] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2e5cc9a-5aaf-46d6-a393-5096db7594c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.299340] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': 
session[52d18e23-934d-b881-b17b-a9e1dee55268]52bf1605-7bbd-c6c2-5268-a3cca9951513, 'name': SearchDatastore_Task, 'duration_secs': 0.017931} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.300882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.301151] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.301395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.301545] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.301727] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.302087] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1074.302087] env[68437]: value = "task-2944744" [ 1074.302087] env[68437]: _type = "Task" [ 1074.302087] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.302282] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-912dfbdb-1de6-46a7-91b6-3f847f5c3122 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.313901] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.315955] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.316253] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.317383] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af2e1c3f-a928-4435-81ac-860a0892a1a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.326036] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1074.326036] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52525136-95fe-c96b-4d26-6d52f6a441b9" [ 1074.326036] env[68437]: _type = "Task" [ 1074.326036] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.339087] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52525136-95fe-c96b-4d26-6d52f6a441b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.585535] env[68437]: DEBUG oslo_concurrency.lockutils [req-64d8062f-e884-4e05-8b3a-5bbe310b47af req-4f37a36a-fb0f-4d10-9511-35c5c323a99e service nova] Releasing lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.632055] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ce5ad0b4-5869-40e3-b9f4-c51bd49abb39 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.479s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.787882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.788302] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1074.792114] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.254s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.793718] env[68437]: INFO nova.compute.claims [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.817359] env[68437]: DEBUG oslo_vmware.api [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.507225} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.817359] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1074.817359] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1074.817682] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1074.817742] env[68437]: INFO nova.compute.manager [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1074.817940] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1074.818285] env[68437]: DEBUG nova.compute.manager [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1074.818285] env[68437]: DEBUG nova.network.neutron [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1074.838382] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52525136-95fe-c96b-4d26-6d52f6a441b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012977} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.840080] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb460416-38f4-4402-ac7e-047d44ce377e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.846841] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1074.846841] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529b0cbe-7848-8440-3101-d660ce745eee" [ 1074.846841] env[68437]: _type = "Task" [ 1074.846841] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.855866] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b0cbe-7848-8440-3101-d660ce745eee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.175656] env[68437]: DEBUG nova.network.neutron [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.300061] env[68437]: DEBUG nova.compute.utils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1075.302912] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1075.303228] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1075.345730] env[68437]: DEBUG nova.policy [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7605d44a5b5448a3966872b4f524d13c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40d8becefc85431b9723c72aa09d152b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1075.358623] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b0cbe-7848-8440-3101-d660ce745eee, 'name': SearchDatastore_Task, 'duration_secs': 0.038869} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.359177] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.359610] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] e2143e07-8c8d-4008-bb73-29aae91baee7/e2143e07-8c8d-4008-bb73-29aae91baee7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.360061] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad27703f-4f34-470d-a6cb-d7e440442b9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.369131] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1075.369131] env[68437]: value = "task-2944745" [ 1075.369131] env[68437]: _type = "Task" [ 1075.369131] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.379109] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.471607] env[68437]: DEBUG nova.compute.manager [req-5b23471b-b721-462c-bd59-e6eb2c5abc1d req-a4c07544-58d3-4245-9233-06fab09df715 service nova] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Received event network-vif-deleted-d22dccb0-3e51-43b6-8bfe-4f6b83be5b62 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1075.570579] env[68437]: INFO nova.compute.manager [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Rebuilding instance [ 1075.624186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "4abf1477-2f0e-4a13-884a-c19420b3e435" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.624460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.648753] env[68437]: DEBUG nova.compute.manager [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.649421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349fe081-93c6-42d5-a0af-d61b31f46e5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.678702] env[68437]: INFO nova.compute.manager [-] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Took 2.17 seconds to deallocate network for instance. [ 1075.731533] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Successfully created port: 520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1075.811764] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1075.881473] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944745, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.068664] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-777da020-25ac-4c71-8f68-e45c3f526843 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.077369] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128c5257-154c-4021-812c-28ca7d144c9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.110406] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eac7ec-2863-42d0-b9c8-916f882b76bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.119775] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c61519-2a72-4b83-8198-5be968da2ee2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.126310] env[68437]: DEBUG nova.network.neutron [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.130227] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1076.140272] env[68437]: DEBUG nova.compute.provider_tree [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.187888] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.382336] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522028} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.382609] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] e2143e07-8c8d-4008-bb73-29aae91baee7/e2143e07-8c8d-4008-bb73-29aae91baee7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.382823] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.383092] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d96ecfd-c9cc-437b-a8ea-a3135587a57f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.391440] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1076.391440] env[68437]: value = "task-2944746" [ 1076.391440] env[68437]: _type = "Task" [ 1076.391440] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.400207] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944746, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.641092] env[68437]: INFO nova.compute.manager [-] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Took 1.82 seconds to deallocate network for instance. 
[ 1076.648519] env[68437]: DEBUG nova.scheduler.client.report [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.667232] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.667519] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaa3c5be-74ca-4e9c-9985-3dd53686fedd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.670233] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.678125] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1076.678125] env[68437]: value = "task-2944747" [ 1076.678125] env[68437]: _type = "Task" [ 1076.678125] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.689668] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.823022] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1076.849187] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1076.849444] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.849608] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1076.849794] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.849942] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1076.850155] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1076.850318] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1076.850475] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1076.850645] env[68437]: DEBUG 
nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1076.850896] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1076.850992] env[68437]: DEBUG nova.virt.hardware [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1076.851864] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270cf69c-e058-4354-b55c-5ca9f0335416 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.860413] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c440d98-0e7e-4041-ad03-a7b88e06807f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.913030] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086636} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.913030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.913869] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11641460-14e6-4702-8d39-a2d5614a9305 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.937859] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] e2143e07-8c8d-4008-bb73-29aae91baee7/e2143e07-8c8d-4008-bb73-29aae91baee7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.938196] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37fa220d-de70-497c-8e12-50a1b49c9db7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.960347] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1076.960347] env[68437]: value = "task-2944748" [ 1076.960347] env[68437]: _type = "Task" [ 1076.960347] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.969053] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944748, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.141239] env[68437]: DEBUG nova.compute.manager [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Received event network-vif-plugged-520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1077.141464] env[68437]: DEBUG oslo_concurrency.lockutils [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.141668] env[68437]: DEBUG oslo_concurrency.lockutils [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] Lock "76d97a56-21a2-4363-a987-ef872f056510-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.141835] env[68437]: DEBUG oslo_concurrency.lockutils [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] Lock "76d97a56-21a2-4363-a987-ef872f056510-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.142047] env[68437]: DEBUG nova.compute.manager [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] No waiting events found dispatching network-vif-plugged-520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1077.143424] env[68437]: WARNING nova.compute.manager [req-6380805a-911d-4700-9d2c-40a6a91aea9d req-c4e0ae85-4d0a-477d-adf3-d02051d3e80d service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Received unexpected event network-vif-plugged-520c7db4-23e9-44bf-846b-9f1eb94579f7 for instance with vm_state building and task_state spawning. 
[ 1077.155642] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.155642] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.155875] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1077.159318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.255s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.160689] env[68437]: INFO nova.compute.claims [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.189732] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944747, 'name': PowerOffVM_Task, 'duration_secs': 0.315733} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.190285] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.191522] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.192482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe88196-8da0-4619-9c8a-ca8c855e631a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.202432] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.202713] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fac2832-2057-4d61-b96b-6d49d8293040 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.236102] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.236385] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.236616] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Deleting the datastore file [datastore2] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.236939] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb357fd9-2c59-4f89-891f-f6918d14454d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.242434] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Successfully updated port: 520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1077.251036] env[68437]: DEBUG oslo_vmware.api [None 
req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1077.251036] env[68437]: value = "task-2944750" [ 1077.251036] env[68437]: _type = "Task" [ 1077.251036] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.262041] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.471447] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944748, 'name': ReconfigVM_Task, 'duration_secs': 0.337888} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.472487] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Reconfigured VM instance instance-00000059 to attach disk [datastore1] e2143e07-8c8d-4008-bb73-29aae91baee7/e2143e07-8c8d-4008-bb73-29aae91baee7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.472487] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c23da0b1-f65c-49e9-ae1e-da2022bb94de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.480516] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1077.480516] env[68437]: value = "task-2944751" [ 1077.480516] env[68437]: _type = "Task" [ 1077.480516] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.490475] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944751, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.502782] env[68437]: DEBUG nova.compute.manager [req-0fe8005f-e83b-4b13-b386-b1b2844780b4 req-8b6c0bad-975e-43dc-b9d5-2bf1b5a60e01 service nova] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Received event network-vif-deleted-9af0d88e-b67d-45be-baec-62c7b8a6f80b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1077.660889] env[68437]: DEBUG nova.compute.utils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1077.662955] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.663157] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1077.706907] env[68437]: DEBUG nova.policy [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56a40a3467524ac39aeca66c5218bc8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c318fe7682014cef8414569a694c4f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.745300] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.745516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.745699] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1077.762868] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 
tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120115} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.763160] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.763349] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.763529] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.966468] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Successfully created port: 593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.993609] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944751, 'name': Rename_Task, 'duration_secs': 0.267797} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.993978] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1077.994215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41ccbb2f-dbe5-455d-8254-d760e723b394 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.004912] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1078.004912] env[68437]: value = "task-2944752" [ 1078.004912] env[68437]: _type = "Task" [ 1078.004912] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.016381] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944752, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.166551] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1078.280022] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1078.412682] env[68437]: DEBUG nova.network.neutron [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating instance_info_cache with network_info: [{"id": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "address": "fa:16:3e:23:fb:b9", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520c7db4-23", "ovs_interfaceid": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.415410] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9820c97-3c55-4359-aeda-0d92857d1737 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.424334] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f84b786-3773-411a-b6b8-f1e807d062cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.460434] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9056fcc7-f86e-4e5a-a392-de285ae9dd76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.472722] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df61ea67-7646-43e5-81ba-89f731042391 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1078.493513] env[68437]: DEBUG nova.compute.provider_tree [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.519439] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944752, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.806432] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.806687] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.806867] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.807069] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.807222] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.807369] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.807582] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 
tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.807744] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.807912] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.808221] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.808422] env[68437]: DEBUG nova.virt.hardware [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.809347] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8952fbf9-4ee7-4844-9e94-62e2182e806d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.818105] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0655e230-f349-4f18-9339-0db299125cdc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.832332] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1078.840046] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.840046] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1078.840046] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c38ac052-5119-4d64-94aa-bc7cb19e9ab0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.857649] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1078.857649] env[68437]: value = "task-2944753" [ 1078.857649] env[68437]: _type = "Task" [ 1078.857649] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.867993] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944753, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.920043] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.920402] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance network_info: |[{"id": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "address": "fa:16:3e:23:fb:b9", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520c7db4-23", "ovs_interfaceid": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1078.920842] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:fb:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '520c7db4-23e9-44bf-846b-9f1eb94579f7', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1078.930201] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1078.930511] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1078.930831] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7c4f455-adee-453c-8944-068a07f7adfb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.955757] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1078.955757] env[68437]: value = "task-2944754" [ 1078.955757] env[68437]: _type = "Task" [ 1078.955757] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.967023] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944754, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.997821] env[68437]: DEBUG nova.scheduler.client.report [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1079.017305] env[68437]: DEBUG oslo_vmware.api [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944752, 'name': PowerOnVM_Task, 'duration_secs': 0.53699} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.017689] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.017790] env[68437]: INFO nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Took 8.42 seconds to spawn the instance on the hypervisor. 
[ 1079.017969] env[68437]: DEBUG nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.018798] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2cf204-84f6-4a21-bef6-838178a10613 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.178856] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1079.182471] env[68437]: DEBUG nova.compute.manager [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Received event network-changed-520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1079.183146] env[68437]: DEBUG nova.compute.manager [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Refreshing instance network info cache due to event network-changed-520c7db4-23e9-44bf-846b-9f1eb94579f7. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1079.183298] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] Acquiring lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.183448] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] Acquired lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.183605] env[68437]: DEBUG nova.network.neutron [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Refreshing network info cache for port 520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1079.206829] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1079.207164] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.207443] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1079.207734] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.207954] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1079.209335] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1079.209335] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1079.209335] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1079.209608] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1079.209833] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1079.210070] env[68437]: DEBUG nova.virt.hardware [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1079.211313] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0f8415-590d-46a9-b384-df144781c62a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.221626] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a330937-fb14-4471-90b9-d0a235c5fbaa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.371088] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944753, 'name': CreateVM_Task, 'duration_secs': 0.312333} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.371088] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1079.371464] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.371507] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.372666] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1079.372666] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc6f371-1c44-4e21-ab7e-c59e23bcdec8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.377569] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1079.377569] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52294c67-7180-a75f-941e-5a4c605a442f" [ 1079.377569] env[68437]: _type = "Task" [ 1079.377569] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.385985] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52294c67-7180-a75f-941e-5a4c605a442f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.455918] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Successfully updated port: 593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.475119] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944754, 'name': CreateVM_Task, 'duration_secs': 0.41624} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.475527] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1079.476423] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.504083] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.345s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.505174] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.508875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.321s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.509173] env[68437]: DEBUG nova.objects.instance [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'resources' on Instance uuid a01364f9-e30d-4140-ae41-1e7c4aaa2251 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1079.541368] env[68437]: INFO nova.compute.manager [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Took 23.66 seconds to build instance. [ 1079.893136] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52294c67-7180-a75f-941e-5a4c605a442f, 'name': SearchDatastore_Task, 'duration_secs': 0.013578} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.893680] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.894276] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.894725] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.894819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.895302] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.895790] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.896315] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1079.896888] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a15365ab-d221-4e68-9d05-bd062196e477 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.899918] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-952b06e4-bffe-45a4-bdf5-7deb9fb5d285 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.908162] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1079.908162] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52847310-bc14-d408-f75f-15af07e2ff4a" [ 1079.908162] env[68437]: _type = "Task" [ 1079.908162] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.914796] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.915171] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.917068] env[68437]: DEBUG nova.network.neutron [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updated VIF entry in instance network info cache for port 520c7db4-23e9-44bf-846b-9f1eb94579f7. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1079.917596] env[68437]: DEBUG nova.network.neutron [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating instance_info_cache with network_info: [{"id": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "address": "fa:16:3e:23:fb:b9", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520c7db4-23", "ovs_interfaceid": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.921017] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0cf1b6-ee25-45e3-8adf-a013be257046 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.927367] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52847310-bc14-d408-f75f-15af07e2ff4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.933805] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1079.933805] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5271d93a-dcfb-7a31-b97b-1e6ef19771fb" [ 1079.933805] env[68437]: _type = "Task" [ 1079.933805] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.944435] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5271d93a-dcfb-7a31-b97b-1e6ef19771fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.969296] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.969533] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquired lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.969699] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1080.019236] env[68437]: DEBUG nova.compute.utils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1080.020345] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1080.021053] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1080.047410] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b12d0d35-a3d7-4a59-9f98-8435092bf85c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.169s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.088277] env[68437]: DEBUG nova.policy [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1080.317973] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b65ce46-097c-4139-b06e-0b2ab14e0da4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.328262] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a04337-1c15-40db-b443-beb03ebea0ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.359276] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fdf441-74ed-401a-bc61-4f59ab3a547c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.368227] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0c3ea4-57f0-45ed-824a-461366c154ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.385170] env[68437]: DEBUG nova.compute.provider_tree [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.393115] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Successfully created port: 63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
1080.421902] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52847310-bc14-d408-f75f-15af07e2ff4a, 'name': SearchDatastore_Task, 'duration_secs': 0.026075} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.422274] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.422643] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1080.422937] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.428073] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3eb2fb3-e311-463d-b524-6ea547b5e596 req-e327ed07-4828-46e6-8fac-01122b013007 service nova] Releasing lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.445125] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5271d93a-dcfb-7a31-b97b-1e6ef19771fb, 'name': SearchDatastore_Task, 'duration_secs': 0.028461} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.445932] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9ecbca5-91b1-4787-8660-90266341adc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.453655] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1080.453655] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a8d120-532a-8fd3-98c5-84f55b3dc9c4" [ 1080.453655] env[68437]: _type = "Task" [ 1080.453655] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.462575] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a8d120-532a-8fd3-98c5-84f55b3dc9c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.524776] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.568627] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1080.889134] env[68437]: DEBUG nova.scheduler.client.report [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.967624] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a8d120-532a-8fd3-98c5-84f55b3dc9c4, 'name': SearchDatastore_Task, 'duration_secs': 0.011065} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.967992] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.969739] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.969739] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.969739] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1080.969739] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14250a9a-8790-4296-a5ba-3ef074a2329a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.972215] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d700e41-1e0c-43ef-8029-0133b25c7827 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.981456] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1080.981456] env[68437]: value = "task-2944755" [ 1080.981456] env[68437]: _type = "Task" [ 1080.981456] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.983014] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1080.983444] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1080.988196] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0545fb14-1d99-4a3a-a3e1-a18d575fd546 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.996800] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1080.996800] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52865a78-7cbb-0449-5f74-92ea2329f0ec" [ 1080.996800] env[68437]: _type = "Task" [ 1080.996800] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.000267] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.010985] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52865a78-7cbb-0449-5f74-92ea2329f0ec, 'name': SearchDatastore_Task, 'duration_secs': 0.011944} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.011896] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f69d510-5d7f-420e-a404-03b63e111b94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.021305] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1081.021305] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528e282a-07da-fd26-b0a9-0e4331cd948a" [ 1081.021305] env[68437]: _type = "Task" [ 1081.021305] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.028484] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528e282a-07da-fd26-b0a9-0e4331cd948a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.227176] env[68437]: DEBUG nova.compute.manager [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Received event network-vif-plugged-593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.227462] env[68437]: DEBUG oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Acquiring lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.227694] env[68437]: DEBUG oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.227860] env[68437]: DEBUG oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.228018] env[68437]: DEBUG nova.compute.manager [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] No waiting events found dispatching network-vif-plugged-593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.230356] env[68437]: WARNING nova.compute.manager [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Received unexpected event network-vif-plugged-593fcc5a-6c63-4ec2-98bd-9931c0e674d9 for instance with vm_state building and task_state spawning. [ 1081.230356] env[68437]: DEBUG nova.compute.manager [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Received event network-changed-593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.230356] env[68437]: DEBUG nova.compute.manager [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Refreshing instance network info cache due to event network-changed-593fcc5a-6c63-4ec2-98bd-9931c0e674d9. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1081.230356] env[68437]: DEBUG oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Acquiring lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.276903] env[68437]: DEBUG nova.network.neutron [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Updating instance_info_cache with network_info: [{"id": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "address": "fa:16:3e:84:b1:83", "network": {"id": "00727c4f-cb1b-46d8-8980-c8f3ff918c87", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2010340519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c318fe7682014cef8414569a694c4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap593fcc5a-6c", "ovs_interfaceid": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.395016] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.398138] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.728s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.399938] env[68437]: INFO nova.compute.claims [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.424436] env[68437]: INFO nova.scheduler.client.report [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted allocations for instance a01364f9-e30d-4140-ae41-1e7c4aaa2251 [ 1081.496926] env[68437]: DEBUG oslo_vmware.api [None 
req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944755, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.530560] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528e282a-07da-fd26-b0a9-0e4331cd948a, 'name': SearchDatastore_Task, 'duration_secs': 0.018991} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.530915] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.531293] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1081.531742] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3c4a5e0-5601-40c2-8991-850d6be1f314 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.538828] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.544097] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1081.544097] env[68437]: value = "task-2944756" [ 1081.544097] env[68437]: _type = "Task" [ 1081.544097] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.558816] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944756, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.576015] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.576354] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.577273] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.577273] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.577476] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.577594] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.577928] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.578210] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.578494] env[68437]: DEBUG 
nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.578725] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.578958] env[68437]: DEBUG nova.virt.hardware [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.580806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c19a8b4-75ec-4bf7-92f2-2a27bfe7514e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.591772] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab63aa2a-ddba-4489-bd29-af1501e812ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.781366] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Releasing lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.781366] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Instance network_info: |[{"id": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "address": "fa:16:3e:84:b1:83", "network": {"id": "00727c4f-cb1b-46d8-8980-c8f3ff918c87", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2010340519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c318fe7682014cef8414569a694c4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap593fcc5a-6c", "ovs_interfaceid": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1081.781366] env[68437]: DEBUG 
oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Acquired lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.781366] env[68437]: DEBUG nova.network.neutron [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Refreshing network info cache for port 593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1081.782274] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:b1:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11032cc2-b275-48d2-9c40-9455ea7d49e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '593fcc5a-6c63-4ec2-98bd-9931c0e674d9', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1081.793806] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Creating folder: Project (c318fe7682014cef8414569a694c4f11). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1081.796193] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-146d9985-d089-4d46-8a0b-ac531ae9346b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.810691] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Created folder: Project (c318fe7682014cef8414569a694c4f11) in parent group-v590848. [ 1081.810911] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Creating folder: Instances. Parent ref: group-v591092. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1081.811171] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e974833-feb1-4fd8-b8e4-18055ffbe7ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.822476] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Created folder: Instances in parent group-v591092. [ 1081.822734] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1081.822937] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1081.823206] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5abf0e33-df4c-42ad-b30e-e86fcf3e3f90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.848322] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1081.848322] env[68437]: value = "task-2944759" [ 1081.848322] env[68437]: _type = "Task" [ 1081.848322] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.859119] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944759, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.894549] env[68437]: DEBUG nova.compute.manager [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-changed-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.894749] env[68437]: DEBUG nova.compute.manager [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing instance network info cache due to event network-changed-a072479e-e965-4e09-a378-229474b176e6. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1081.894964] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] Acquiring lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.895616] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] Acquired lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.895804] env[68437]: DEBUG nova.network.neutron [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing network info cache for port a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1081.940782] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Successfully updated port: 63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.948568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c8b0a144-8753-4d60-8bf0-a903a3bfb882 tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "a01364f9-e30d-4140-ae41-1e7c4aaa2251" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.119s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.998787] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537268} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.998787] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1081.999050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1081.999282] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-416b3bd4-936a-4bcf-91ab-d49909fdce94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.007640] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1082.007640] env[68437]: value = "task-2944760" [ 1082.007640] env[68437]: _type = "Task" [ 1082.007640] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.019146] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.061409] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944756, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.364505] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944759, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.367602] env[68437]: DEBUG nova.network.neutron [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Updated VIF entry in instance network info cache for port 593fcc5a-6c63-4ec2-98bd-9931c0e674d9. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1082.367602] env[68437]: DEBUG nova.network.neutron [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Updating instance_info_cache with network_info: [{"id": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "address": "fa:16:3e:84:b1:83", "network": {"id": "00727c4f-cb1b-46d8-8980-c8f3ff918c87", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2010340519-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c318fe7682014cef8414569a694c4f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap593fcc5a-6c", "ovs_interfaceid": "593fcc5a-6c63-4ec2-98bd-9931c0e674d9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.450542] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.450810] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.450850] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1082.523030] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115038} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.523436] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1082.528021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d402272c-4241-46a4-86c1-4b4717579c03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.555563] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1082.559654] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47a6a579-c393-48f9-94b3-7bf30494ce29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.584649] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944756, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904186} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.586783] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1082.586783] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1082.586783] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1082.586783] env[68437]: value = "task-2944762" [ 1082.586783] env[68437]: _type = "Task" [ 1082.586783] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.587149] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0572c7d5-20eb-442d-bd26-dfe65bd26377 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.602296] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944762, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.604051] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1082.604051] env[68437]: value = "task-2944763" [ 1082.604051] env[68437]: _type = "Task" [ 1082.604051] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.616269] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.702463] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7265e702-1d57-4776-9172-58baadd72eb9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.710790] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e89f4a-392a-4a9b-b0a1-bd684620c35f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.749163] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96ba9e5-e58a-4328-a2bd-f7a74bbde183 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.759910] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7816c8-138d-4768-9abe-dca267d6cd8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.778887] env[68437]: DEBUG nova.compute.provider_tree [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.860822] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944759, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.869611] env[68437]: DEBUG oslo_concurrency.lockutils [req-1070b829-8281-44b8-a2e4-0b1dd8e65bb9 req-e15dac05-74f6-42cc-b0a6-f7325d17fcf3 service nova] Releasing lock "refresh_cache-8d87308a-5583-4785-9f10-13a6f9b5fe98" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.964995] env[68437]: DEBUG nova.network.neutron [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updated VIF entry in instance network info cache for port a072479e-e965-4e09-a378-229474b176e6. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1082.965389] env[68437]: DEBUG nova.network.neutron [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.984721] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1083.102694] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944762, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.116487] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217523} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.117722] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.117722] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fecf7a-a054-42dc-a6ad-90deac282c20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.147207] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.148612] env[68437]: DEBUG nova.network.neutron [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updating instance_info_cache with network_info: [{"id": "63b32499-5908-4ff2-8386-9e979aee59c9", "address": "fa:16:3e:3b:f7:4a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b32499-59", "ovs_interfaceid": "63b32499-5908-4ff2-8386-9e979aee59c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.149917] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67542be9-d840-476e-87db-5ded46ed5c10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.165518] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.165802] env[68437]: DEBUG nova.compute.manager [None 
req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance network_info: |[{"id": "63b32499-5908-4ff2-8386-9e979aee59c9", "address": "fa:16:3e:3b:f7:4a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b32499-59", "ovs_interfaceid": "63b32499-5908-4ff2-8386-9e979aee59c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.166246] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f7:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b32499-5908-4ff2-8386-9e979aee59c9', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.173755] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.174709] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.174962] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54452447-c56f-45ec-a3a6-fcb024d3b35c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.194932] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1083.194932] env[68437]: value = "task-2944764" [ 1083.194932] env[68437]: _type = "Task" [ 1083.194932] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.195594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "191b441c-2c9f-48f9-b83a-d539722e6375" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.195777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.203689] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.203689] env[68437]: value = "task-2944765" [ 1083.203689] env[68437]: _type = "Task" [ 1083.203689] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.207597] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944764, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.223320] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944765, 'name': CreateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.285865] env[68437]: DEBUG nova.scheduler.client.report [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.361820] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944759, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.382045] env[68437]: INFO nova.compute.manager [None req-efa21a6c-34c2-4a3f-b503-8b54ba69ff53 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Get console output [ 1083.382407] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-efa21a6c-34c2-4a3f-b503-8b54ba69ff53 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] The console log is missing. 
Check your VSPC configuration [ 1083.468122] env[68437]: DEBUG oslo_concurrency.lockutils [req-3e6c01ed-802b-4b3e-ac8c-b58ddd3f0b53 req-3ac55130-eab8-49fc-be19-c90e278fbd9c service nova] Releasing lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.574769] env[68437]: DEBUG nova.compute.manager [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received event network-vif-plugged-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1083.574965] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.575560] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.575665] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.576058] env[68437]: DEBUG nova.compute.manager [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] No waiting events found dispatching network-vif-plugged-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.576140] env[68437]: WARNING nova.compute.manager [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received unexpected event network-vif-plugged-63b32499-5908-4ff2-8386-9e979aee59c9 for instance with vm_state building and task_state spawning. [ 1083.576277] env[68437]: DEBUG nova.compute.manager [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received event network-changed-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1083.576434] env[68437]: DEBUG nova.compute.manager [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Refreshing instance network info cache due to event network-changed-63b32499-5908-4ff2-8386-9e979aee59c9. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1083.576644] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Acquiring lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.577097] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Acquired lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.577163] env[68437]: DEBUG nova.network.neutron [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Refreshing network info cache for port 63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1083.602257] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944762, 'name': ReconfigVM_Task, 'duration_secs': 0.539023} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.603009] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Reconfigured VM instance instance-00000058 to attach disk [datastore1] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee/bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1083.603888] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f33dffe1-96dd-47d0-b7c7-ca929df907ea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.617820] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1083.617820] env[68437]: value = "task-2944766" [ 1083.617820] env[68437]: _type = "Task" [ 1083.617820] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.632844] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944766, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.705257] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1083.723731] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944764, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.731413] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944765, 'name': CreateVM_Task, 'duration_secs': 0.482397} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.731603] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.732383] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.732562] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.732891] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.733180] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58df89c3-77f9-43c7-befa-3c1ffaabfa87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.763512] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1083.763512] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52724cfb-4d98-36f2-b5b2-ea5d51d063af" [ 1083.763512] env[68437]: _type = "Task" [ 1083.763512] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.771941] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52724cfb-4d98-36f2-b5b2-ea5d51d063af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.795320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.795320] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.796627] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.642s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.799198] env[68437]: DEBUG nova.objects.instance [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lazy-loading 'resources' on Instance uuid 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.863031] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944759, 'name': CreateVM_Task, 'duration_secs': 1.746407} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.863031] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.863875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.864021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.864306] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.864693] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bdb8080-cdbb-4339-90bd-caa64b0b186e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.871039] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1083.871039] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52659f27-b435-31bb-d54b-655738c3d803" [ 1083.871039] env[68437]: _type = "Task" [ 1083.871039] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.884022] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52659f27-b435-31bb-d54b-655738c3d803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.130114] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944766, 'name': Rename_Task, 'duration_secs': 0.23472} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.130447] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.130700] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd01384b-d9ae-4124-9989-1fe699fd6f43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.138567] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1084.138567] env[68437]: value = "task-2944767" [ 1084.138567] env[68437]: _type = "Task" [ 1084.138567] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.149711] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.184242] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "9d54d4b6-9b92-4a24-9582-475108bf2710" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.184536] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.210735] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944764, 'name': ReconfigVM_Task, 'duration_secs': 0.594337} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.211031] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.211648] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a1a57de-65bd-4d6f-bb19-598954b46513 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.219518] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1084.219518] env[68437]: value = "task-2944768" [ 1084.219518] env[68437]: _type = "Task" [ 1084.219518] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.232953] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944768, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.234878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.272867] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52724cfb-4d98-36f2-b5b2-ea5d51d063af, 'name': SearchDatastore_Task, 'duration_secs': 0.016685} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.273217] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.273485] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.273726] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.273951] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.274196] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.274465] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-529a6035-458e-40e7-b284-8f6651d8c0b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.284726] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.284919] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.285644] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b44da958-26a0-4547-804a-de56fe9af744 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.291665] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1084.291665] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523dc28f-767f-d32a-7486-6f5b12cd11d0" [ 1084.291665] env[68437]: _type = "Task" [ 1084.291665] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.296312] env[68437]: DEBUG nova.network.neutron [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updated VIF entry in instance network info cache for port 63b32499-5908-4ff2-8386-9e979aee59c9. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1084.296685] env[68437]: DEBUG nova.network.neutron [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updating instance_info_cache with network_info: [{"id": "63b32499-5908-4ff2-8386-9e979aee59c9", "address": "fa:16:3e:3b:f7:4a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b32499-59", "ovs_interfaceid": "63b32499-5908-4ff2-8386-9e979aee59c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.303313] env[68437]: DEBUG nova.compute.utils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1084.307142] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523dc28f-767f-d32a-7486-6f5b12cd11d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.308421] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1084.308586] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1084.354569] env[68437]: DEBUG nova.policy [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e4b1b3012874778bc147c3e7b00133c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6f6382f6c6843529a37d7c62837523a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.381337] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52659f27-b435-31bb-d54b-655738c3d803, 'name': SearchDatastore_Task, 'duration_secs': 0.044533} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.385224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.385466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.385702] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.385854] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.386048] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.386482] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ec5f230-8bc8-4dc8-aaac-aa2f4f667746 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.397878] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.398172] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.401444] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea56493d-895d-4c1f-9001-56270a717ebc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.408518] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1084.408518] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5261f01e-f70c-4bd7-2278-4bde06188dd3" [ 1084.408518] env[68437]: _type = "Task" [ 1084.408518] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.417329] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5261f01e-f70c-4bd7-2278-4bde06188dd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.478046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.478046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.478436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.478436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.478612] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock 
"53c4ca02-2bc3-4a55-9aea-0e0dd669a37c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.481185] env[68437]: INFO nova.compute.manager [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Terminating instance [ 1084.563920] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee68824b-8e9b-4b50-a9a3-185f983905a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.574021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb31bc3c-d60f-47cb-bbea-9123c8f074a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.606404] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649d497d-6afb-4020-9fa7-dd320cc7d582 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.616945] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed92afe-c5cf-451c-92f9-e5acc39bac1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.631091] env[68437]: DEBUG nova.compute.provider_tree [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.650266] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944767, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.686603] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1084.702558] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Successfully created port: c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1084.735478] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944768, 'name': Rename_Task, 'duration_secs': 0.228806} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.735758] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.736089] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-769db36d-f46a-4a41-87f1-39e6ca055ede {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.744095] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1084.744095] env[68437]: value = "task-2944769" [ 1084.744095] env[68437]: _type = "Task" [ 1084.744095] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.753271] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944769, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.805434] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523dc28f-767f-d32a-7486-6f5b12cd11d0, 'name': SearchDatastore_Task, 'duration_secs': 0.011454} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.805434] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f74869f-3f30-420e-b45a-88490810ad2a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.808883] env[68437]: DEBUG oslo_concurrency.lockutils [req-ae773562-a08f-4232-ad44-40f1ec811dbe req-d5d1c948-a1fa-4f4e-88e8-ae4fd3e09c51 service nova] Releasing lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.809815] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1084.816144] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1084.816144] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217dbfd-8c4b-3b36-dc80-bb273358ce79" [ 1084.816144] env[68437]: _type = "Task" [ 1084.816144] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.827397] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5217dbfd-8c4b-3b36-dc80-bb273358ce79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.922057] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5261f01e-f70c-4bd7-2278-4bde06188dd3, 'name': SearchDatastore_Task, 'duration_secs': 0.028855} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.922896] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e21ac57-cce3-4bff-a38f-bd4808716347 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.930436] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1084.930436] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5226d01b-1a30-953c-f342-01fc3b205942" [ 1084.930436] env[68437]: _type = "Task" [ 1084.930436] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.939333] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5226d01b-1a30-953c-f342-01fc3b205942, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.986417] env[68437]: DEBUG nova.compute.manager [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1084.986672] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.987964] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8a6fac-4a94-4ad8-8597-f1145f1b9c99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.996505] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.996801] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06e4b1d7-bcce-4a99-9ffd-5e72533d44bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.003666] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 1085.003666] env[68437]: value = "task-2944770" [ 1085.003666] env[68437]: _type = "Task" [ 1085.003666] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.011777] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.135318] env[68437]: DEBUG nova.scheduler.client.report [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.149669] env[68437]: DEBUG oslo_vmware.api [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944767, 'name': PowerOnVM_Task, 'duration_secs': 0.572161} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.150591] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.150809] env[68437]: DEBUG nova.compute.manager [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.151611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfb053a-7562-4342-a050-0fb98167b2fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.211434] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.256914] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944769, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.334891] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5217dbfd-8c4b-3b36-dc80-bb273358ce79, 'name': SearchDatastore_Task, 'duration_secs': 0.031132} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.337664] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.337664] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 2b985ca2-f0d1-4937-aa33-154aa53b0a40/2b985ca2-f0d1-4937-aa33-154aa53b0a40.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.337664] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bd4adcd-01ee-424c-89fa-214d9c9bb096 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.345868] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1085.345868] env[68437]: value = "task-2944771" [ 1085.345868] env[68437]: _type = "Task" [ 1085.345868] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.355540] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.445629] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5226d01b-1a30-953c-f342-01fc3b205942, 'name': SearchDatastore_Task, 'duration_secs': 0.016214} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.445960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.446263] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8d87308a-5583-4785-9f10-13a6f9b5fe98/8d87308a-5583-4785-9f10-13a6f9b5fe98.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1085.446557] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8808e4f9-e359-4b54-860d-1c6a334d27af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.455337] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1085.455337] env[68437]: value = "task-2944772" [ 1085.455337] env[68437]: _type = "Task" [ 1085.455337] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.465561] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.514573] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944770, 'name': PowerOffVM_Task, 'duration_secs': 0.238094} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.514959] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.515306] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.515592] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd90520c-c4c1-4e98-a038-09692387bd2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.599054] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1085.599054] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1085.599054] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleting the datastore file [datastore1] 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.599054] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41bd8c32-c858-4959-8d5d-50722e8330bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.607026] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for the task: (returnval){ [ 1085.607026] env[68437]: value = "task-2944774" [ 1085.607026] env[68437]: _type = "Task" [ 1085.607026] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.619634] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.640906] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.643769] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.409s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.645916] env[68437]: INFO nova.compute.claims [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.667940] env[68437]: INFO nova.scheduler.client.report [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted allocations for instance 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439 [ 1085.672401] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.756628] env[68437]: DEBUG oslo_vmware.api [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944769, 'name': PowerOnVM_Task, 'duration_secs': 0.627454} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.756903] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.757225] env[68437]: INFO nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Took 8.94 seconds to spawn the instance on the hypervisor. 
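The "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" entries above (for example the "compute_resources" lock held for 1.844s) come from oslo.concurrency's lockutils wrappers around the resource tracker and compute manager. The sketch below is a minimal illustration of that pattern, not Nova's actual code: the lock name is taken from the log, but the function bodies are placeholders.

    # Illustrative sketch of the oslo.concurrency locking that produces the
    # "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
    from oslo_concurrency import lockutils

    def update_usage(instance_uuid):
        # Context-manager form: serializes access the same way the
        # "compute_resources" lock in the log does.
        with lockutils.lock('compute_resources'):
            pass  # placeholder: adjust tracked resource usage for instance_uuid

    # Decorator form; the synchronized wrapper ("inner" in the log paths) is
    # what reports the waited/held durations seen above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        pass  # placeholder: claim resources under the same lock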
[ 1085.757439] env[68437]: DEBUG nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.759021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f02cf74-c135-4b4b-ab9b-181a81f8c689 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.829049] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1085.861285] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1085.863017] env[68437]: DEBUG nova.virt.hardware [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1085.864033] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f6c168-ad58-4a85-abf2-555e859f5aa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.870231] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944771, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.878957] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d82efed6-dfdf-41ac-8cf6-3830217adafd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.967840] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944772, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.120592] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.167831] env[68437]: DEBUG nova.compute.manager [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Received event network-vif-plugged-c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1086.167831] env[68437]: DEBUG oslo_concurrency.lockutils [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] Acquiring lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.167831] env[68437]: DEBUG oslo_concurrency.lockutils [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.167831] env[68437]: DEBUG oslo_concurrency.lockutils [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.168415] env[68437]: DEBUG nova.compute.manager [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] No waiting events found dispatching network-vif-plugged-c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.168415] env[68437]: WARNING nova.compute.manager [req-753305e5-eba0-4029-88fe-0f60d901b0ad req-3d825ba3-42a1-40cc-8384-56165534e588 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Received unexpected event network-vif-plugged-c58ce980-01f0-476a-b297-adac9a7fcdef for instance with vm_state building and task_state spawning. 
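The block above is the Neutron-to-Nova handshake for port c58ce980-01f0-476a-b297-adac9a7fcdef: Neutron sends an external "network-vif-plugged-<port-id>" event, the compute manager takes the per-instance "<uuid>-events" lock, looks for a registered waiter, and, finding none because the instance is still building, logs the "unexpected event" warning. The stand-alone sketch below only illustrates that register/pop idea with stdlib primitives; it is not Nova's implementation and all names are illustrative.

    # Toy model of the wait/dispatch logic behind the "<uuid>-events" lock and
    # the "No waiting events found dispatching network-vif-plugged-..." message.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
            self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            # Called by the spawn path before plugging VIFs; the caller later
            # blocks on the returned Event with a timeout.
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            # Called when Neutron's event arrives. Returns False when nothing
            # was waiting, which corresponds to the WARNING above.
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                return False
            ev.set()
            return True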
[ 1086.179070] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7bf641ac-aaf1-47f7-afce-a1870a5944b6 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "987ed4b2-5c7a-4c7b-a7a5-66b4e515b439" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.041s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.247195] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Successfully updated port: c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.274782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.275081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.275294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.275475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.275644] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.277925] env[68437]: INFO nova.compute.manager [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Terminating instance [ 1086.285382] env[68437]: INFO 
nova.compute.manager [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Took 20.75 seconds to build instance. [ 1086.358401] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534928} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.358401] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 2b985ca2-f0d1-4937-aa33-154aa53b0a40/2b985ca2-f0d1-4937-aa33-154aa53b0a40.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.358401] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.358695] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e7a48f2-8441-4f30-9d79-7afcaca410b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.368152] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1086.368152] env[68437]: value = "task-2944775" [ 1086.368152] env[68437]: _type = "Task" [ 1086.368152] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.379858] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944775, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.467329] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.784885} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.467665] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8d87308a-5583-4785-9f10-13a6f9b5fe98/8d87308a-5583-4785-9f10-13a6f9b5fe98.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1086.467890] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1086.468350] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82d7350f-f5e2-4117-8a66-dd698a2f9116 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.475355] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1086.475355] env[68437]: value = "task-2944776" [ 1086.475355] env[68437]: _type = "Task" [ 1086.475355] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.486587] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944776, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.518754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.518754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.518754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.518754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.518754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.520514] env[68437]: INFO nova.compute.manager [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Terminating instance [ 1086.620863] env[68437]: DEBUG oslo_vmware.api [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Task: {'id': task-2944774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.893373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.621161] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1086.621349] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1086.621526] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1086.621698] env[68437]: INFO nova.compute.manager [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1086.621957] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.622182] env[68437]: DEBUG nova.compute.manager [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1086.622278] env[68437]: DEBUG nova.network.neutron [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1086.647297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.647577] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.750619] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.750769] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.750926] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1086.788201] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d810d016-d335-4c29-b5d8-d7c8ac481263 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.270s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.793417] env[68437]: DEBUG nova.compute.manager [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1086.793624] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1086.794983] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14d020d-1fb4-4f3e-8177-d69351781a79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.806585] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1086.809983] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d6030b9-6400-4662-add6-9cec0a62e953 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.819429] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1086.819429] env[68437]: value = "task-2944777" [ 1086.819429] env[68437]: _type = "Task" [ 1086.819429] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.831211] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944777, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.885525] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079638} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.889199] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.890675] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646ea508-dd6b-4d21-927c-ac1ae1cdcd58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.932676] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 2b985ca2-f0d1-4937-aa33-154aa53b0a40/2b985ca2-f0d1-4937-aa33-154aa53b0a40.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.934846] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85167d14-d546-4ec0-8d93-1d1888330ac0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.955203] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bf8b07-3fcc-4cbe-a425-551a373a6b54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.965679] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d64527-11f0-43ed-bbf4-4abe01db2715 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.969079] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1086.969079] env[68437]: value = "task-2944778" [ 1086.969079] env[68437]: _type = "Task" [ 1086.969079] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.007357] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b849d4db-06b2-41c5-9a38-5badd859cb1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.013326] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944778, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.019324] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087872} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.021554] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1087.022497] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27391d7-3e4b-482d-82cc-5cbc2f56e530 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.026054] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7b35e5-8c50-4442-9573-b3e5cf88ded8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.033224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "refresh_cache-bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.033224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquired lock "refresh_cache-bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.033224] env[68437]: DEBUG nova.network.neutron [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1087.044034] env[68437]: DEBUG nova.compute.provider_tree [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.068876] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 8d87308a-5583-4785-9f10-13a6f9b5fe98/8d87308a-5583-4785-9f10-13a6f9b5fe98.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.070526] env[68437]: DEBUG nova.scheduler.client.report [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.073824] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0988d1a3-6f0b-49b5-85b4-d3bde31fb5ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.098944] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1087.098944] env[68437]: value = "task-2944779" [ 1087.098944] env[68437]: _type = "Task" [ 1087.098944] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.107102] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.151658] env[68437]: DEBUG nova.compute.utils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1087.287267] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1087.330383] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944777, 'name': PowerOffVM_Task, 'duration_secs': 0.258015} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.330596] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.330815] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.331335] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92ad8c21-a8ed-452a-a555-2855790ec9d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.403397] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.403397] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.403397] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleting the datastore file [datastore2] cf691a81-60e3-40ed-ba80-8f481ff2554b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.403827] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77712460-1510-47fb-b464-04698e8a8277 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.414972] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1087.414972] env[68437]: value = "task-2944781" [ 1087.414972] env[68437]: _type = "Task" [ 1087.414972] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.425013] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944781, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.469253] env[68437]: DEBUG nova.network.neutron [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updating instance_info_cache with network_info: [{"id": "c58ce980-01f0-476a-b297-adac9a7fcdef", "address": "fa:16:3e:3f:d4:d1", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58ce980-01", "ovs_interfaceid": "c58ce980-01f0-476a-b297-adac9a7fcdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.485012] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944778, 'name': ReconfigVM_Task, 'duration_secs': 0.506242} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.485364] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 2b985ca2-f0d1-4937-aa33-154aa53b0a40/2b985ca2-f0d1-4937-aa33-154aa53b0a40.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.486030] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf4d7b2e-4cf0-44b6-9f72-91baa5260195 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.494321] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1087.494321] env[68437]: value = "task-2944782" [ 1087.494321] env[68437]: _type = "Task" [ 1087.494321] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.508441] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944782, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.552502] env[68437]: DEBUG nova.network.neutron [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1087.591047] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.947s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.591391] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1087.593983] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.383s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.595451] env[68437]: INFO nova.compute.claims [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1087.607688] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944779, 'name': ReconfigVM_Task, 'duration_secs': 0.38373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.611047] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 8d87308a-5583-4785-9f10-13a6f9b5fe98/8d87308a-5583-4785-9f10-13a6f9b5fe98.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.611047] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd0d5507-f3a7-46a5-8b79-ea12a6a6efda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.616557] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1087.616557] env[68437]: value = "task-2944783" [ 1087.616557] env[68437]: _type = "Task" [ 1087.616557] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.620503] env[68437]: DEBUG nova.network.neutron [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.629290] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944783, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.655126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.685768] env[68437]: DEBUG nova.network.neutron [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.927449] env[68437]: DEBUG oslo_vmware.api [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294396} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.927858] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.928183] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1087.928479] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1087.928776] env[68437]: INFO nova.compute.manager [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1087.929175] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.929474] env[68437]: DEBUG nova.compute.manager [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1087.929625] env[68437]: DEBUG nova.network.neutron [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1087.980287] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.980627] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance network_info: |[{"id": "c58ce980-01f0-476a-b297-adac9a7fcdef", "address": "fa:16:3e:3f:d4:d1", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58ce980-01", "ovs_interfaceid": "c58ce980-01f0-476a-b297-adac9a7fcdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1087.981084] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:d4:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c58ce980-01f0-476a-b297-adac9a7fcdef', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.989424] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1087.991918] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.992182] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b781139-99b5-478a-92f9-a2c71007fc85 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.022911] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944782, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.025037] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.025037] env[68437]: value = "task-2944784" [ 1088.025037] env[68437]: _type = "Task" [ 1088.025037] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.034277] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944784, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.100212] env[68437]: DEBUG nova.compute.utils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1088.103588] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1088.103915] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1088.123176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Releasing lock "refresh_cache-bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.123666] env[68437]: DEBUG nova.compute.manager [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1088.123928] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.129171] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ca8200-4b6b-465e-8070-562d1d2cfcb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.132915] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944783, 'name': Rename_Task, 'duration_secs': 0.157562} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.133735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1088.134455] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a7bfdc5-2643-4dab-a27b-ec7759b4c1f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.142173] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1088.146856] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39a71b89-c7f8-454f-abca-2c6fa7abc004 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.149287] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1088.149287] env[68437]: value = "task-2944785" [ 1088.149287] env[68437]: _type = "Task" [ 1088.149287] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.156760] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1088.156760] env[68437]: value = "task-2944786" [ 1088.156760] env[68437]: _type = "Task" [ 1088.156760] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.157804] env[68437]: DEBUG nova.policy [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894a53f57a104c51945fa90c168a0483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68baf1daffa842b4adb854fe0cec9524', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1088.169806] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944785, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.176549] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.188025] env[68437]: INFO nova.compute.manager [-] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Took 1.57 seconds to deallocate network for instance. [ 1088.286332] env[68437]: DEBUG nova.compute.manager [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Received event network-changed-c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1088.286456] env[68437]: DEBUG nova.compute.manager [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Refreshing instance network info cache due to event network-changed-c58ce980-01f0-476a-b297-adac9a7fcdef. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1088.286687] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Acquiring lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.286857] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Acquired lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.287070] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Refreshing network info cache for port c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1088.517058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.517188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.517850] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.517850] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.517850] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.520399] env[68437]: INFO nova.compute.manager [None 
req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Terminating instance [ 1088.526769] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944782, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.537323] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944784, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.573609] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.574280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.574554] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.574929] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.575152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.577547] env[68437]: INFO nova.compute.manager [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Terminating instance [ 1088.604296] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 
tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1088.650577] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.650577] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.650869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.650924] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.651102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.659711] env[68437]: INFO nova.compute.manager [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Terminating instance [ 1088.674677] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944785, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.675065] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.697271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.709824] env[68437]: DEBUG nova.network.neutron [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.737028] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Successfully created port: 94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1088.740194] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.740455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.740719] env[68437]: INFO nova.compute.manager [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Attaching volume eecd3a07-849b-4e77-9f5b-d19317a41bef to /dev/sdb [ 1088.793580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cd1fe8-42da-4339-96d7-9ea44bb4c202 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.809958] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f997260-6a85-499c-9eee-f6331d776d0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.829258] env[68437]: DEBUG nova.virt.block_device [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating existing volume attachment record: c2abc95c-2c85-497a-a192-dc78e09527e6 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1088.943615] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42123827-0ab3-414b-8a10-5f7835ae1ad1 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.953240] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0d31db-9fac-4741-a145-1c8690015b01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.992554] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4bbf8d-3581-47d5-83ef-e296d0639fa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.000478] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3572a7f-1c5b-451d-9b56-698ef1d9bfa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.014562] env[68437]: DEBUG nova.compute.provider_tree [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.024214] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944782, 'name': Rename_Task, 'duration_secs': 1.219377} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.025028] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1089.025028] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba9899df-476e-4c51-a1e5-f08c4e5287a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.028011] env[68437]: DEBUG nova.compute.manager [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.028169] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.031796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4391738e-350d-4418-a178-e4da9150764a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.038311] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1089.038311] env[68437]: value = "task-2944788" [ 1089.038311] env[68437]: _type = "Task" [ 1089.038311] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.046060] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944784, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.048631] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.048905] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf0ace03-b918-4a82-9de7-cba8bde55cb2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.053902] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.062910] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1089.062910] env[68437]: value = "task-2944789" [ 1089.062910] env[68437]: _type = "Task" [ 1089.062910] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.072890] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.078986] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updated VIF entry in instance network info cache for port c58ce980-01f0-476a-b297-adac9a7fcdef. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1089.078986] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updating instance_info_cache with network_info: [{"id": "c58ce980-01f0-476a-b297-adac9a7fcdef", "address": "fa:16:3e:3f:d4:d1", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58ce980-01", "ovs_interfaceid": "c58ce980-01f0-476a-b297-adac9a7fcdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.081404] env[68437]: DEBUG nova.compute.manager [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.083826] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.085250] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01606b38-b2ed-4f80-8c36-d8c8e2962a3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.094650] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.094905] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c48c05e4-806d-4331-a73c-33b773a3f757 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.105857] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1089.105857] env[68437]: value = "task-2944791" [ 1089.105857] env[68437]: _type = "Task" [ 1089.105857] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.115462] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.164581] env[68437]: DEBUG oslo_vmware.api [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944785, 'name': PowerOnVM_Task, 'duration_secs': 0.821659} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.168026] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1089.168310] env[68437]: INFO nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Took 9.99 seconds to spawn the instance on the hypervisor. 
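The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver starts an asynchronous vSphere task through the API session and then blocks in wait_for_task, whose polling is what produces the repeated "Task: {...} progress is N%" lines. A minimal sketch of that pattern using the public oslo_vmware.api session is shown below; the host name, credentials and vm_ref are placeholders, not values taken from this log.

    from oslo_vmware import api

    # Placeholder connection details, not taken from this log.
    session = api.VMwareAPISession(
        host='vcenter.example.org',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    def power_on(vm_ref):
        # Start the asynchronous vSphere task...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...then poll it until vCenter reports completion; this is the
        # wait_for_task/_poll_task loop visible throughout this log.
        return session.wait_for_task(task)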
[ 1089.168544] env[68437]: DEBUG nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.169186] env[68437]: DEBUG nova.compute.manager [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.169521] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.170158] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb8d0c5-0dd9-4e51-9470-2c2f2fa1fbf6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.173093] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b088adf1-e275-4956-9411-a8258aa9d175 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.181929] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944786, 'name': PowerOffVM_Task, 'duration_secs': 0.665026} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.186496] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.186675] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.186949] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.187379] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e099104-868e-43a5-bc92-e01590bc051e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.188726] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9029c3c3-15da-43dc-abc4-e63f78dd7287 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.197379] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1089.197379] env[68437]: value = "task-2944793" [ 1089.197379] env[68437]: _type = "Task" [ 1089.197379] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.208632] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.212514] env[68437]: INFO nova.compute.manager [-] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Took 1.28 seconds to deallocate network for instance. 
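The "Acquiring lock", "acquired by ... :: waited" and "released ... :: held" entries that run through this section come from oslo.concurrency: the decorated form (logged from the lockutils "inner" wrapper) guards compute-manager paths such as do_terminate_instance and instance_claim, while the explicit form (logged from the lockutils "lock" helper) guards the refresh_cache sections. A minimal sketch of both follows, with an illustrative lock name rather than nova's own code.

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on a named in-process lock and
    # logs the acquire/release lines seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section, e.g. resource-tracker bookkeeping

    # Context-manager form, as seen around the network info cache refresh.
    instance_uuid = 'bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee'
    with lockutils.lock('refresh_cache-' + instance_uuid):
        pass  # rebuild the instance network info cache here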
[ 1089.221690] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.221960] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.222176] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Deleting the datastore file [datastore1] bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.222741] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0427e38-4e39-4227-910f-eed04a5273fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.231652] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for the task: (returnval){ [ 1089.231652] env[68437]: value = "task-2944795" [ 1089.231652] env[68437]: _type = "Task" [ 1089.231652] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.242540] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944795, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.520564] env[68437]: DEBUG nova.scheduler.client.report [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.539510] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944784, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.549052] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944788, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.572574] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.581583] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Releasing lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.581864] env[68437]: DEBUG nova.compute.manager [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Received event network-changed-520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1089.582041] env[68437]: DEBUG nova.compute.manager [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Refreshing instance network info cache due to event network-changed-520c7db4-23e9-44bf-846b-9f1eb94579f7. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1089.582262] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Acquiring lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.582405] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Acquired lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.582565] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Refreshing network info cache for port 520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1089.611481] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.617737] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1089.645795] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1089.646077] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1089.646272] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1089.646475] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1089.646628] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1089.646777] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1089.646993] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1089.647168] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1089.647335] env[68437]: DEBUG 
nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1089.647495] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1089.647666] env[68437]: DEBUG nova.virt.hardware [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1089.648539] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635139d6-4c42-4010-bf83-bcea7e4c7d1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.657199] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1206d4ab-f8e6-4089-aaa4-432674336df4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.697337] env[68437]: INFO nova.compute.manager [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Took 18.19 seconds to build instance. [ 1089.706936] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944793, 'name': PowerOffVM_Task, 'duration_secs': 0.300816} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.707222] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.707459] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.707630] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-891f0b5f-32dd-400e-bfb1-de0a001a53fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.718901] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.743254] env[68437]: DEBUG oslo_vmware.api [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Task: {'id': task-2944795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190526} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.743557] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1089.743867] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1089.744099] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1089.744284] env[68437]: INFO nova.compute.manager [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Took 1.62 seconds to destroy the instance on the hypervisor. 
[ 1089.744525] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1089.744725] env[68437]: DEBUG nova.compute.manager [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1089.744810] env[68437]: DEBUG nova.network.neutron [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1089.793817] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.794097] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.794258] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleting the datastore file [datastore1] 27429c12-ce0a-4e21-ac1b-6862a8063a9f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.794541] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-178416f4-bb27-4f57-bb74-96f545638628 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.802106] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1089.802106] env[68437]: value = "task-2944797" [ 1089.802106] env[68437]: _type = "Task" [ 1089.802106] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.811705] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.987456] env[68437]: DEBUG nova.network.neutron [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1090.025488] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.026275] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1090.029414] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.357s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.029637] env[68437]: DEBUG nova.objects.instance [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1090.049982] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944784, 'name': CreateVM_Task, 'duration_secs': 2.022374} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.050238] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.051560] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.051791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.052201] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1090.052483] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6293ed94-1722-4080-b3a9-d7b1ee8f8952 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.058610] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944788, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.063179] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1090.063179] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5284ac06-499b-bc66-fa7f-6fe22792431a" [ 1090.063179] env[68437]: _type = "Task" [ 1090.063179] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.080349] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.087902] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5284ac06-499b-bc66-fa7f-6fe22792431a, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.088614] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.088837] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1090.089096] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.089251] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.089433] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1090.090103] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-320f0f51-3220-4893-8c3c-15076fec300a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.102482] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1090.102681] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1090.103600] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afda36f1-ef4f-4e5f-ae57-c11af255753e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.115414] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1090.115414] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5285f418-14f5-c545-a116-e48dee74b677" [ 1090.115414] env[68437]: _type = "Task" [ 1090.115414] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.118424] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944791, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.129544] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5285f418-14f5-c545-a116-e48dee74b677, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.202038] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5877e19-e752-4c37-ac65-f87c1871f5eb tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.707s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.317874] env[68437]: DEBUG oslo_vmware.api [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275878} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.321059] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.322474] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.322474] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.322474] env[68437]: INFO nova.compute.manager [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1090.322474] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.324665] env[68437]: DEBUG nova.compute.manager [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.324665] env[68437]: DEBUG nova.network.neutron [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1090.328743] env[68437]: DEBUG nova.compute.manager [req-9c341f7a-8a06-4ac6-be21-0085da5c22c4 req-7649dc58-3d83-4716-afd4-7800a97a30a0 service nova] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Received event network-vif-deleted-020f4f15-f02d-4a17-a872-71d79b1ea226 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.342614] env[68437]: DEBUG nova.compute.manager [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received event network-vif-plugged-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.342808] env[68437]: DEBUG oslo_concurrency.lockutils [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] Acquiring lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.342999] env[68437]: DEBUG oslo_concurrency.lockutils [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] Lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.343181] env[68437]: DEBUG oslo_concurrency.lockutils [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] Lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.343352] env[68437]: DEBUG nova.compute.manager [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] No waiting events found dispatching network-vif-plugged-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.343510] env[68437]: WARNING nova.compute.manager [req-576422d5-7b79-4386-b05b-0fb016712dcf req-19bddc9e-f937-4d33-a90e-812abc2c2ad2 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received unexpected event network-vif-plugged-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 for instance with vm_state building and task_state spawning. 
[ 1090.431763] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Successfully updated port: 94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1090.459818] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updated VIF entry in instance network info cache for port 520c7db4-23e9-44bf-846b-9f1eb94579f7. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1090.459818] env[68437]: DEBUG nova.network.neutron [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating instance_info_cache with network_info: [{"id": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "address": "fa:16:3e:23:fb:b9", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap520c7db4-23", "ovs_interfaceid": "520c7db4-23e9-44bf-846b-9f1eb94579f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.490403] env[68437]: DEBUG nova.network.neutron [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.534645] env[68437]: DEBUG nova.compute.utils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.536121] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1090.538645] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1090.552795] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944788, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.579107] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944789, 'name': PowerOffVM_Task, 'duration_secs': 1.124095} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.579340] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.579513] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.579770] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f83d8d0-95f8-4fd6-90d6-43d86e854b51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.591919] env[68437]: DEBUG nova.policy [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa2e8222eaf14cef93e64347789cff72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29df9774929e4738b8ee79216c45dbdd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1090.613641] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944791, 'name': PowerOffVM_Task, 'duration_secs': 1.082364} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.613979] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.614169] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.614420] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d8ba6ca-cc0b-46a1-87fa-0ed3293cf788 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.632827] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5285f418-14f5-c545-a116-e48dee74b677, 'name': SearchDatastore_Task, 'duration_secs': 0.014261} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.633772] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3012670b-d350-4392-918f-ca475d81b065 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.642135] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1090.642135] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5277aa0c-c0a9-51c4-8899-9328c0a9b78e" [ 1090.642135] env[68437]: _type = "Task" [ 1090.642135] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.654199] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5277aa0c-c0a9-51c4-8899-9328c0a9b78e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.655390] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.655695] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.655971] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleting the datastore file [datastore1] b81a414d-51bf-4f08-b0d3-a19a7aa4efe5 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.656343] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f533ea69-79b0-45f9-9ebf-89ff2aa8a9d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.665443] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for the task: (returnval){ [ 1090.665443] env[68437]: value = "task-2944800" [ 1090.665443] env[68437]: _type = "Task" [ 1090.665443] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.678456] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944800, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.892603] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Successfully created port: 9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.934736] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.934976] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.935139] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1090.938478] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.938478] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.938478] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Deleting the datastore file [datastore2] ccad008b-0a3a-4234-9c4c-c3a5230a938e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.938892] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7be68363-2cfa-4367-93e3-17fcf9e189bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.947123] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for the task: (returnval){ [ 1090.947123] env[68437]: value = "task-2944801" [ 1090.947123] env[68437]: _type = "Task" [ 1090.947123] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.956033] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944801, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.963670] env[68437]: DEBUG oslo_concurrency.lockutils [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] Releasing lock "refresh_cache-76d97a56-21a2-4363-a987-ef872f056510" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.963976] env[68437]: DEBUG nova.compute.manager [req-c5f59e45-6a0b-4b9f-a4a3-2cd4c4f385ee req-639d4608-78a5-49db-bfa0-140f20a6f7fa service nova] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Received event network-vif-deleted-b15f4c4a-122d-4231-be11-a7d9e18e59ed {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1090.986140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.986466] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.986677] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.986884] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.987086] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.989205] env[68437]: INFO nova.compute.manager [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Terminating instance [ 1090.995020] env[68437]: INFO nova.compute.manager [-] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Took 1.25 seconds to deallocate network for instance. [ 1091.040194] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1091.043317] env[68437]: DEBUG oslo_concurrency.lockutils [None req-820e55dc-19b8-4e6a-a571-9f951d0f1bc6 tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.047514] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.352s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.048436] env[68437]: DEBUG nova.objects.instance [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lazy-loading 'resources' on Instance uuid 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.056576] env[68437]: DEBUG oslo_vmware.api [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944788, 'name': PowerOnVM_Task, 'duration_secs': 1.725034} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.056861] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1091.057088] env[68437]: INFO nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Took 9.52 seconds to spawn the instance on the hypervisor. 
[ 1091.057896] env[68437]: DEBUG nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1091.058135] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4493f2a7-7ff0-4dbd-b4f1-f664d07301ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.153384] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5277aa0c-c0a9-51c4-8899-9328c0a9b78e, 'name': SearchDatastore_Task, 'duration_secs': 0.024814} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.153717] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.153988] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.154273] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c4b4385-8aa0-4391-b488-f7a2e087cfb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.162377] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1091.162377] env[68437]: value = "task-2944802" [ 1091.162377] env[68437]: _type = "Task" [ 1091.162377] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.171960] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944802, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.173636] env[68437]: DEBUG nova.network.neutron [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.179793] env[68437]: DEBUG oslo_vmware.api [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Task: {'id': task-2944800, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396616} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.179793] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.179793] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.179793] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.179793] env[68437]: INFO nova.compute.manager [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1091.179793] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.180082] env[68437]: DEBUG nova.compute.manager [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.180082] env[68437]: DEBUG nova.network.neutron [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1091.460865] env[68437]: DEBUG oslo_vmware.api [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Task: {'id': task-2944801, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284734} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.461187] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.461346] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1091.461523] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.461718] env[68437]: INFO nova.compute.manager [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Took 2.38 seconds to destroy the instance on the hypervisor. [ 1091.461967] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.462372] env[68437]: DEBUG nova.compute.manager [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.462372] env[68437]: DEBUG nova.network.neutron [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1091.472690] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1091.494051] env[68437]: DEBUG nova.compute.manager [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.494373] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.495533] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0fd93c-87a6-4667-9150-bd389461e938 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.501578] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.509239] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1091.509707] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8652a656-e685-4818-82a8-a24a23a8508b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.519746] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1091.519746] env[68437]: value = "task-2944804" [ 1091.519746] env[68437]: _type = "Task" [ 1091.519746] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.533207] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944804, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.596560] env[68437]: INFO nova.compute.manager [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Took 17.71 seconds to build instance. [ 1091.673586] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944802, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.674856] env[68437]: DEBUG nova.network.neutron [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [{"id": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "address": "fa:16:3e:96:2b:b0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e0ca43-56", "ovs_interfaceid": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.679242] env[68437]: INFO nova.compute.manager [-] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Took 1.36 seconds to deallocate network for instance. 
[ 1091.839050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72307acd-8c42-4afb-b137-c9ab92e42953 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.847774] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cda7315-a134-437a-bc1c-db46ed1a4f04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.879450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9728d102-81d3-4ea6-817d-9d61bcf6d292 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.888124] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5a1da6-cfce-4cc9-94b1-7e218e5636c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.893506] env[68437]: DEBUG nova.network.neutron [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.922590] env[68437]: DEBUG nova.compute.provider_tree [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.031430] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944804, 'name': PowerOffVM_Task, 'duration_secs': 0.228332} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.031723] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.031905] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.032179] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-540c44d4-a523-4260-8a0c-07a82e42a6fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.055626] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1092.082242] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1092.082609] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1092.082696] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1092.082877] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1092.083076] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1092.083186] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1092.083397] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1092.083554] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 
tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1092.083757] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1092.083963] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1092.084160] env[68437]: DEBUG nova.virt.hardware [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1092.085132] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0faa0ac-3a81-479c-b9e8-df648721af19 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.094428] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb389eb-9151-4de0-95ce-9a5137d16cef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.098681] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7c294e50-5c5f-4685-8a28-92060f4a6a42 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.226s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.115129] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.115412] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.115539] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Deleting the datastore file [datastore1] 8d87308a-5583-4785-9f10-13a6f9b5fe98 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.115795] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02d65058-b6c8-4d9d-a5c4-cb63165e38b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.122784] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for the task: (returnval){ [ 1092.122784] env[68437]: value = "task-2944806" [ 1092.122784] env[68437]: _type = "Task" [ 1092.122784] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.130773] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944806, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.174752] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53863} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.175015] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1092.176430] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1092.176704] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65fac417-5b78-4c1a-ada2-dcaac6e737da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.180668] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.181492] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Instance network_info: |[{"id": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "address": "fa:16:3e:96:2b:b0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e0ca43-56", "ovs_interfaceid": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.181492] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:2b:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94e0ca43-56a9-44cc-b9f1-0fa484701ba2', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.190884] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.193225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.193487] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.193853] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1092.193853] env[68437]: value = "task-2944807" [ 1092.193853] env[68437]: _type = "Task" [ 1092.193853] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.196645] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-369a2c13-1b4f-4d97-b3b9-be329be6515e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.224395] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944807, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.226127] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.226127] env[68437]: value = "task-2944808" [ 1092.226127] env[68437]: _type = "Task" [ 1092.226127] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.236099] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944808, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.277684] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.277981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.278150] env[68437]: INFO nova.compute.manager [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Shelving [ 1092.364471] env[68437]: DEBUG nova.compute.manager [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Received event network-vif-deleted-b9828641-08ba-49a4-8810-3049e52c44a2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.364728] env[68437]: DEBUG nova.compute.manager [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Received event network-vif-deleted-583b37d4-09c0-4e4a-a343-e7f5898a9038 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.364923] env[68437]: DEBUG nova.compute.manager [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Received event network-vif-plugged-9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.365395] env[68437]: DEBUG 
oslo_concurrency.lockutils [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] Acquiring lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.368149] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.368342] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.368520] env[68437]: DEBUG nova.compute.manager [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] No waiting events found dispatching network-vif-plugged-9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.368714] env[68437]: WARNING nova.compute.manager [req-e9650846-cb61-430d-a82b-9b279cf3e259 req-08aaa557-b477-4069-9d46-8131910296c6 service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Received unexpected event network-vif-plugged-9e63795b-1911-4125-b17f-8b478af321a0 for instance with vm_state building and task_state spawning. [ 1092.374717] env[68437]: DEBUG nova.compute.manager [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.374919] env[68437]: DEBUG nova.compute.manager [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing instance network info cache due to event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1092.375199] env[68437]: DEBUG oslo_concurrency.lockutils [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] Acquiring lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.375305] env[68437]: DEBUG oslo_concurrency.lockutils [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] Acquired lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.375470] env[68437]: DEBUG nova.network.neutron [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing network info cache for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1092.396488] env[68437]: INFO nova.compute.manager [-] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Took 1.22 seconds to deallocate network for instance. [ 1092.428355] env[68437]: DEBUG nova.scheduler.client.report [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1092.459114] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Successfully updated port: 9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1092.632606] env[68437]: DEBUG oslo_vmware.api [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Task: {'id': task-2944806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301782} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.632906] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.633062] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.633257] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.633429] env[68437]: INFO nova.compute.manager [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1092.633697] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.633902] env[68437]: DEBUG nova.compute.manager [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.633992] env[68437]: DEBUG nova.network.neutron [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1092.714877] env[68437]: DEBUG nova.network.neutron [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.729617] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944807, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08657} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.734703] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.736998] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3b0d9b-c84a-4ccd-881f-61a516f304d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.749019] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944808, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.777776] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1092.778212] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5f1ea96-1681-4de0-8f5d-21499096ab8f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.812107] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1092.812107] env[68437]: value = "task-2944809" [ 1092.812107] env[68437]: _type = "Task" [ 1092.812107] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.822342] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944809, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.903961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.935235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.938583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.219s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.938837] env[68437]: DEBUG nova.objects.instance [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'resources' on Instance uuid cf691a81-60e3-40ed-ba80-8f481ff2554b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.961958] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.961958] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquired lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.961958] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1092.971507] env[68437]: INFO nova.scheduler.client.report [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Deleted allocations for instance 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c [ 1093.103171] env[68437]: DEBUG nova.network.neutron [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updated VIF entry in instance network info cache for port 
94e0ca43-56a9-44cc-b9f1-0fa484701ba2. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1093.103404] env[68437]: DEBUG nova.network.neutron [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [{"id": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "address": "fa:16:3e:96:2b:b0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e0ca43-56", "ovs_interfaceid": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.222615] env[68437]: INFO nova.compute.manager [-] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Took 1.76 seconds to deallocate network for instance. [ 1093.242613] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944808, 'name': CreateVM_Task, 'duration_secs': 0.700056} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.243229] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1093.243695] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.244067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.244255] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1093.244536] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3fe1699-66fd-4fbd-bc37-e98f2e82fdce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.250619] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1093.250619] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5260151f-2ed3-7581-459d-26b0c162b7d6" [ 1093.250619] env[68437]: _type = "Task" [ 1093.250619] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.259734] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5260151f-2ed3-7581-459d-26b0c162b7d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.313087] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1093.313504] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a1e0d35-2c97-426a-b6ad-dc16547c3f3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.324925] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944809, 'name': ReconfigVM_Task, 'duration_secs': 0.455261} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.326323] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.327038] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1093.327038] env[68437]: value = "task-2944810" [ 1093.327038] env[68437]: _type = "Task" [ 1093.327038] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.327227] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e87510c4-a18a-48c6-901a-0dbb2fd8ba8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.339483] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944810, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.341010] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1093.341010] env[68437]: value = "task-2944811" [ 1093.341010] env[68437]: _type = "Task" [ 1093.341010] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.350547] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944811, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.381226] env[68437]: DEBUG nova.network.neutron [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.389100] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Volume attach. Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1093.389401] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591098', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'name': 'volume-eecd3a07-849b-4e77-9f5b-d19317a41bef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee0450b5-66ce-41ed-9f4f-7ffa7b46f769', 'attached_at': '', 'detached_at': '', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'serial': 'eecd3a07-849b-4e77-9f5b-d19317a41bef'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1093.390417] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0848e360-dfc5-481d-a8b8-78cec21667b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.409026] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0c3c6e-9bf0-40f7-ae1d-89b19b21556b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.437333] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] volume-eecd3a07-849b-4e77-9f5b-d19317a41bef/volume-eecd3a07-849b-4e77-9f5b-d19317a41bef.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.437754] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d43120d3-9774-4dba-9126-d9b9467864a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.460773] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1093.460773] env[68437]: value = "task-2944812" [ 1093.460773] env[68437]: _type = "Task" [ 1093.460773] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.475693] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.481850] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f2695ee-12e7-4a63-80cb-7cd3fadcf822 tempest-ServersWithSpecificFlavorTestJSON-1138545103 tempest-ServersWithSpecificFlavorTestJSON-1138545103-project-member] Lock "53c4ca02-2bc3-4a55-9aea-0e0dd669a37c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.004s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.497773] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1093.606492] env[68437]: DEBUG oslo_concurrency.lockutils [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] Releasing lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.606787] env[68437]: DEBUG nova.compute.manager [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Received event network-vif-deleted-d21ad3db-ccd9-4d63-9eb0-4620abdab063 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1093.606946] env[68437]: INFO nova.compute.manager [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Neutron deleted interface d21ad3db-ccd9-4d63-9eb0-4620abdab063; detaching it from the instance and deleting it from the info cache [ 1093.607136] env[68437]: DEBUG nova.network.neutron [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.662898] env[68437]: DEBUG nova.network.neutron [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Updating instance_info_cache with network_info: [{"id": "9e63795b-1911-4125-b17f-8b478af321a0", "address": "fa:16:3e:ec:b6:b3", "network": {"id": "51a90722-aa86-45b6-ae41-5d5922d2ad61", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-991713960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29df9774929e4738b8ee79216c45dbdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e63795b-19", "ovs_interfaceid": "9e63795b-1911-4125-b17f-8b478af321a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.698365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2b4fa5-43ef-47df-9e2c-d374a48830d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.707080] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000d6472-b897-452c-af54-717e0e18d337 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.739392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.740310] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb33763-7481-4a12-8e12-b8e00ab0a1fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.749021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3f0a70-7470-48c7-bef2-7f1d77b246a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.767882] env[68437]: DEBUG nova.compute.provider_tree [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.775324] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5260151f-2ed3-7581-459d-26b0c162b7d6, 'name': SearchDatastore_Task, 'duration_secs': 0.021032} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.776154] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.776528] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1093.776899] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.777167] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.777471] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1093.778757] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12cae32f-5bf5-4ab7-963d-98f619345a0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.792568] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1093.792759] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1093.793514] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f84db9c-6859-487c-bc92-a87f072b6dfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.800059] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1093.800059] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f666d6-fc80-0bba-c4cd-acdab779ad76" [ 1093.800059] env[68437]: _type = "Task" [ 1093.800059] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.807308] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f666d6-fc80-0bba-c4cd-acdab779ad76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.839113] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944810, 'name': PowerOffVM_Task, 'duration_secs': 0.246082} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.839411] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1093.840224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56067239-de91-4e2f-bc1e-6e25f426c208 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.853372] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944811, 'name': Rename_Task, 'duration_secs': 0.22265} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.866319] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.866542] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2b931ca-f44c-452c-ad7c-0d60d876a6dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.868627] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef28a4e-78dc-4a87-8327-def3cc22196e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.880137] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1093.880137] env[68437]: value = "task-2944813" [ 1093.880137] env[68437]: _type = "Task" [ 1093.880137] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.884109] env[68437]: INFO nova.compute.manager [-] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Took 1.25 seconds to deallocate network for instance. [ 1093.888996] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944813, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.972428] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944812, 'name': ReconfigVM_Task, 'duration_secs': 0.370506} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.972700] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfigured VM instance instance-00000053 to attach disk [datastore2] volume-eecd3a07-849b-4e77-9f5b-d19317a41bef/volume-eecd3a07-849b-4e77-9f5b-d19317a41bef.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1093.977424] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21abf342-f0c3-4050-940b-987b971a78d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.995143] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1093.995143] env[68437]: value = "task-2944814" [ 1093.995143] env[68437]: _type = "Task" [ 1093.995143] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.003867] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.110436] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d252d537-d7dd-4412-9755-f7132cdcd9d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.121845] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f95dda-95ec-4378-bec6-6cbb57702488 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.154420] env[68437]: DEBUG nova.compute.manager [req-1eb20f3d-d315-4922-a904-4637defab002 req-6c53c4e2-a778-4af5-b9b6-b12ac78cbdb4 service nova] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Detach interface failed, port_id=d21ad3db-ccd9-4d63-9eb0-4620abdab063, reason: Instance ccad008b-0a3a-4234-9c4c-c3a5230a938e could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1094.165294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Releasing lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.165616] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Instance network_info: |[{"id": "9e63795b-1911-4125-b17f-8b478af321a0", "address": "fa:16:3e:ec:b6:b3", "network": {"id": "51a90722-aa86-45b6-ae41-5d5922d2ad61", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-991713960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29df9774929e4738b8ee79216c45dbdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e63795b-19", "ovs_interfaceid": "9e63795b-1911-4125-b17f-8b478af321a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1094.166028] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b6:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1559ce49-7345-443f-bf02-4bfeb88356ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e63795b-1911-4125-b17f-8b478af321a0', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.173488] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Creating folder: Project (29df9774929e4738b8ee79216c45dbdd). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1094.173900] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcdaeabc-a6c6-4073-b617-003039883a0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.186668] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Created folder: Project (29df9774929e4738b8ee79216c45dbdd) in parent group-v590848. [ 1094.186864] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Creating folder: Instances. Parent ref: group-v591100. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1094.187126] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8475eb5f-af1a-4f9e-a7a3-7e240f7f9939 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.198176] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Created folder: Instances in parent group-v591100. [ 1094.198437] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.198644] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.198858] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e7107ad-aea5-4bec-bdeb-a3fd684724df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.221344] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.221344] env[68437]: value = "task-2944817" [ 1094.221344] env[68437]: _type = "Task" [ 1094.221344] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.229770] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.271623] env[68437]: DEBUG nova.scheduler.client.report [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.316495] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f666d6-fc80-0bba-c4cd-acdab779ad76, 'name': SearchDatastore_Task, 'duration_secs': 0.014232} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.319531] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecbaa028-5bf8-4077-a034-a543e7234010 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.326279] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1094.326279] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5298160d-41cf-6dbd-e816-d5aab8e1a23a" [ 1094.326279] env[68437]: _type = "Task" [ 1094.326279] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.338753] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5298160d-41cf-6dbd-e816-d5aab8e1a23a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.382396] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1094.382883] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a79ba234-cc34-46b8-b929-986fd1b1995a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.394835] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.395165] env[68437]: DEBUG oslo_vmware.api [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944813, 'name': PowerOnVM_Task, 'duration_secs': 0.508497} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.396513] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.396725] env[68437]: INFO nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Took 8.57 seconds to spawn the instance on the hypervisor. [ 1094.396951] env[68437]: DEBUG nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.397306] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1094.397306] env[68437]: value = "task-2944818" [ 1094.397306] env[68437]: _type = "Task" [ 1094.397306] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.398078] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36296232-bb77-4b93-9446-9e41b75d93f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.416356] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944818, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.506198] env[68437]: DEBUG oslo_vmware.api [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944814, 'name': ReconfigVM_Task, 'duration_secs': 0.181515} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.506680] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591098', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'name': 'volume-eecd3a07-849b-4e77-9f5b-d19317a41bef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee0450b5-66ce-41ed-9f4f-7ffa7b46f769', 'attached_at': '', 'detached_at': '', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'serial': 'eecd3a07-849b-4e77-9f5b-d19317a41bef'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1094.544197] env[68437]: DEBUG nova.compute.manager [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Received event network-changed-9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1094.544197] env[68437]: DEBUG nova.compute.manager [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Refreshing instance network info cache due to event network-changed-9e63795b-1911-4125-b17f-8b478af321a0. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1094.544343] env[68437]: DEBUG oslo_concurrency.lockutils [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] Acquiring lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.544398] env[68437]: DEBUG oslo_concurrency.lockutils [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] Acquired lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.544870] env[68437]: DEBUG nova.network.neutron [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Refreshing network info cache for port 9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1094.733208] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.780858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.780858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.280s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.781961] env[68437]: DEBUG nova.objects.instance [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lazy-loading 'resources' on Instance uuid bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.804623] env[68437]: INFO nova.scheduler.client.report [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted allocations for instance cf691a81-60e3-40ed-ba80-8f481ff2554b [ 1094.838274] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5298160d-41cf-6dbd-e816-d5aab8e1a23a, 'name': SearchDatastore_Task, 'duration_secs': 0.012712} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.838538] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.838838] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 191b441c-2c9f-48f9-b83a-d539722e6375/191b441c-2c9f-48f9-b83a-d539722e6375.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.839119] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec79798d-6b3b-43a8-a701-04ba4eceb4a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.848620] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1094.848620] env[68437]: value = "task-2944819" [ 1094.848620] env[68437]: _type = "Task" [ 1094.848620] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.863404] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.911960] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944818, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.925984] env[68437]: INFO nova.compute.manager [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Took 18.28 seconds to build instance. [ 1095.243219] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.316140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d149e9e8-7d3d-4f5e-b515-91d6ae05543d tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "cf691a81-60e3-40ed-ba80-8f481ff2554b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.041s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.362260] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944819, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.416350] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944818, 'name': CreateSnapshot_Task, 'duration_secs': 0.707031} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.416654] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1095.417455] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1f9f1f-b0ee-49b6-a0a6-51d77322d5c0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.432297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7002eeb5-fbf0-45ca-b042-1f6e4fa65a89 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.804s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.451850] env[68437]: DEBUG nova.network.neutron [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Updated VIF entry in instance network info cache for port 9e63795b-1911-4125-b17f-8b478af321a0. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1095.453216] env[68437]: DEBUG nova.network.neutron [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Updating instance_info_cache with network_info: [{"id": "9e63795b-1911-4125-b17f-8b478af321a0", "address": "fa:16:3e:ec:b6:b3", "network": {"id": "51a90722-aa86-45b6-ae41-5d5922d2ad61", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-991713960-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29df9774929e4738b8ee79216c45dbdd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1559ce49-7345-443f-bf02-4bfeb88356ef", "external-id": "nsx-vlan-transportzone-670", "segmentation_id": 670, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e63795b-19", "ovs_interfaceid": "9e63795b-1911-4125-b17f-8b478af321a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.557511] env[68437]: DEBUG nova.objects.instance [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.561364] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa9d209-db1c-4945-8736-bc7c045d9e06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.570468] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1753a8-65e7-4143-a427-2296a055b4f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.605727] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f024e9-35cc-4172-a07a-b0704f3f5a64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.615130] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c570776-eade-44b5-bd79-0347a4c5ddc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.636108] env[68437]: DEBUG nova.compute.provider_tree [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.721748] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "19dde8dd-eae6-41a0-b147-c505db1cda15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.722143] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.723351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.723351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.723351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.724964] env[68437]: INFO nova.compute.manager [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Terminating instance [ 1095.739316] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.860883] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530725} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.861366] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 191b441c-2c9f-48f9-b83a-d539722e6375/191b441c-2c9f-48f9-b83a-d539722e6375.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.861760] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.862061] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-102ab954-9271-43c2-9142-93d7140c981f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.870466] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1095.870466] env[68437]: value = "task-2944820" [ 1095.870466] env[68437]: _type = "Task" [ 1095.870466] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.878944] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944820, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.944106] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1095.944106] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6204eb05-5847-4662-a53c-7f340953a94e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.953965] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1095.953965] env[68437]: value = "task-2944821" [ 1095.953965] env[68437]: _type = "Task" [ 1095.953965] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.955184] env[68437]: DEBUG oslo_concurrency.lockutils [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] Releasing lock "refresh_cache-9d54d4b6-9b92-4a24-9582-475108bf2710" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.955453] env[68437]: DEBUG nova.compute.manager [req-3909904b-7bc8-43b8-996c-3215858fa307 req-7156ac80-2873-4325-a867-dbcfdf5a1d6b service nova] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Received event network-vif-deleted-593fcc5a-6c63-4ec2-98bd-9931c0e674d9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1095.965954] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944821, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.070159] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ba7d8a2-4020-401f-b593-cb9fe106ad56 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.329s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.139486] env[68437]: DEBUG nova.scheduler.client.report [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.232251] env[68437]: DEBUG nova.compute.manager [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.232511] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.233335] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98c3654-9657-417e-a5c3-e1cf4a4a6de3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.247640] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.251391] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd3b4110-15c5-4bf9-a0b0-31ae58d7125f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.252897] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.259116] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1096.259116] env[68437]: value = "task-2944822" [ 1096.259116] env[68437]: _type = "Task" [ 1096.259116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.273186] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944822, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.381857] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075211} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.382190] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.383034] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34fe94f-1e96-44b3-a5b5-7455fe6023db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.408446] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 191b441c-2c9f-48f9-b83a-d539722e6375/191b441c-2c9f-48f9-b83a-d539722e6375.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.409757] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b831cbb-bc1a-4566-a078-49281b0e8c83 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.434369] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1096.434369] env[68437]: value = "task-2944823" [ 1096.434369] env[68437]: _type = "Task" [ 1096.434369] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.448486] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944823, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.472218] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944821, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.600355] env[68437]: DEBUG nova.compute.manager [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Received event network-changed-c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1096.600588] env[68437]: DEBUG nova.compute.manager [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Refreshing instance network info cache due to event network-changed-c58ce980-01f0-476a-b297-adac9a7fcdef. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1096.600843] env[68437]: DEBUG oslo_concurrency.lockutils [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] Acquiring lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.601195] env[68437]: DEBUG oslo_concurrency.lockutils [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] Acquired lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.601528] env[68437]: DEBUG nova.network.neutron [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Refreshing network info cache for port c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1096.646528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.648605] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.455s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.649193] env[68437]: DEBUG nova.objects.instance [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lazy-loading 'resources' on Instance uuid 27429c12-ce0a-4e21-ac1b-6862a8063a9f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.678912] env[68437]: INFO nova.scheduler.client.report [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Deleted allocations for instance bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee [ 1096.740965] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.771124] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944822, 'name': PowerOffVM_Task, 'duration_secs': 0.234215} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.771124] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.771124] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.771124] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d9cb51e-5649-4e1c-9bd0-847ab7d54251 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.838474] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.838683] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.838924] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleting the datastore file [datastore2] 19dde8dd-eae6-41a0-b147-c505db1cda15 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.839315] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c077c0e-11c0-4fdf-87d3-1e3282122534 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.848098] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for the task: (returnval){ [ 1096.848098] env[68437]: value = "task-2944825" [ 1096.848098] env[68437]: _type = "Task" [ 1096.848098] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.858041] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944825, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.949161] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944823, 'name': ReconfigVM_Task, 'duration_secs': 0.4534} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.950642] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 191b441c-2c9f-48f9-b83a-d539722e6375/191b441c-2c9f-48f9-b83a-d539722e6375.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.950642] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d606b68c-ae3c-439e-8446-9f0ab2bd84aa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.962911] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1096.962911] env[68437]: value = "task-2944826" [ 1096.962911] env[68437]: _type = "Task" [ 1096.962911] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.971395] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944821, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.979279] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944826, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.190998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dff962f-c5ca-4822-a72d-296cc785b6ee tempest-ServersListShow2100Test-713358222 tempest-ServersListShow2100Test-713358222-project-member] Lock "bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.674s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.243027] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944817, 'name': CreateVM_Task, 'duration_secs': 2.980928} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.243204] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.243913] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.244092] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.244420] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1097.244694] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba04f35-5baa-44bc-b5f7-4c5ee9f1dd9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.261189] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1097.261189] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52830126-3277-6487-14a3-ded77acaa2b1" [ 1097.261189] env[68437]: _type = "Task" [ 1097.261189] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.276124] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52830126-3277-6487-14a3-ded77acaa2b1, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.276428] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.276744] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1097.276889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.277247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.277425] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1097.279946] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c3dab18-3d18-4278-b390-9228543f7b3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.289822] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1097.290011] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1097.290753] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54d390c4-d9c4-4711-b106-c9f9fba7cc05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.305022] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1097.305022] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b838fa-a4df-e402-89ce-d90d86ea5a9b" [ 1097.305022] env[68437]: _type = "Task" [ 1097.305022] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.310606] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b838fa-a4df-e402-89ce-d90d86ea5a9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.359911] env[68437]: DEBUG oslo_vmware.api [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Task: {'id': task-2944825, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155655} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.364188] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.366543] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.366543] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.366543] env[68437]: INFO nova.compute.manager [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1097.366543] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.366543] env[68437]: DEBUG nova.compute.manager [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.366543] env[68437]: DEBUG nova.network.neutron [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1097.402926] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3084059b-b108-46fc-ace4-bb0eeaaec392 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.414035] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f558c7-3f54-4e2d-88ed-95f77f8f802b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.450629] env[68437]: DEBUG nova.network.neutron [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updated VIF entry in instance network info cache for port c58ce980-01f0-476a-b297-adac9a7fcdef. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1097.451088] env[68437]: DEBUG nova.network.neutron [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updating instance_info_cache with network_info: [{"id": "c58ce980-01f0-476a-b297-adac9a7fcdef", "address": "fa:16:3e:3f:d4:d1", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc58ce980-01", "ovs_interfaceid": "c58ce980-01f0-476a-b297-adac9a7fcdef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.452864] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-c3dd8bd2-1b73-4721-af0e-5550ebae3c76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.474692] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90e8291-f74d-428d-b14a-b49ad2e8f7be {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.478961] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944821, 'name': CloneVM_Task, 'duration_secs': 1.270662} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.482661] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Created linked-clone VM from snapshot [ 1097.483378] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944826, 'name': Rename_Task, 'duration_secs': 0.169186} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.484029] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248d37bc-f630-42de-a3ae-587c0b847eba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.486794] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1097.495872] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b092f236-c9cc-46b7-9da5-72cd0da2801d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.497692] env[68437]: DEBUG nova.compute.provider_tree [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.507056] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Uploading image 7e2bf87d-fe8a-4380-a83e-2ec60b802467 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1097.512749] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1097.512749] env[68437]: value = "task-2944827" [ 1097.512749] env[68437]: _type = "Task" [ 
1097.512749] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.525747] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944827, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.546210] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1097.546210] env[68437]: value = "vm-591104" [ 1097.546210] env[68437]: _type = "VirtualMachine" [ 1097.546210] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1097.546567] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d1cf7b26-1f22-4203-b7d8-2b41ee9068bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.555962] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lease: (returnval){ [ 1097.555962] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520f98d1-c4db-8266-d532-e177f64a8628" [ 1097.555962] env[68437]: _type = "HttpNfcLease" [ 1097.555962] env[68437]: } obtained for exporting VM: (result){ [ 1097.555962] env[68437]: value = "vm-591104" [ 1097.555962] env[68437]: _type = "VirtualMachine" [ 1097.555962] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1097.556314] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the lease: (returnval){ [ 1097.556314] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520f98d1-c4db-8266-d532-e177f64a8628" [ 1097.556314] env[68437]: _type = "HttpNfcLease" [ 1097.556314] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1097.565150] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1097.565150] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520f98d1-c4db-8266-d532-e177f64a8628" [ 1097.565150] env[68437]: _type = "HttpNfcLease" [ 1097.565150] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1097.812625] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b838fa-a4df-e402-89ce-d90d86ea5a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.02441} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.813517] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4f039c8-cef2-4507-960d-df73be4a77f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.819861] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1097.819861] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528889e0-f14a-22c2-0b87-ccb416edbda8" [ 1097.819861] env[68437]: _type = "Task" [ 1097.819861] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.829314] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528889e0-f14a-22c2-0b87-ccb416edbda8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.960458] env[68437]: DEBUG oslo_concurrency.lockutils [req-6b4a822e-b376-4f5d-98fa-493fdef352b8 req-921bfe2f-a99f-47b8-8671-89be038d0d14 service nova] Releasing lock "refresh_cache-4abf1477-2f0e-4a13-884a-c19420b3e435" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1097.997551] env[68437]: DEBUG nova.compute.manager [req-03dc19f3-7165-4ef3-b5e9-3931f0fd2865 req-070da191-937c-4ecd-953a-2977784e5017 service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Received event network-vif-deleted-9fbfd56e-861b-488a-afc9-9efe25097c73 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1097.997764] env[68437]: INFO nova.compute.manager [req-03dc19f3-7165-4ef3-b5e9-3931f0fd2865 req-070da191-937c-4ecd-953a-2977784e5017 service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Neutron deleted interface 9fbfd56e-861b-488a-afc9-9efe25097c73; detaching it from the instance and deleting it from the info cache [ 1097.997940] env[68437]: DEBUG nova.network.neutron [req-03dc19f3-7165-4ef3-b5e9-3931f0fd2865 req-070da191-937c-4ecd-953a-2977784e5017 service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.000722] env[68437]: DEBUG nova.scheduler.client.report [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.023640] env[68437]: DEBUG 
oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944827, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.064343] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1098.064343] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520f98d1-c4db-8266-d532-e177f64a8628" [ 1098.064343] env[68437]: _type = "HttpNfcLease" [ 1098.064343] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1098.064652] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1098.064652] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520f98d1-c4db-8266-d532-e177f64a8628" [ 1098.064652] env[68437]: _type = "HttpNfcLease" [ 1098.064652] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1098.065592] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14063fd-73d4-4b8a-a9d2-a736e0a67eec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.073405] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1098.073405] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk for reading. 
{{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1098.140651] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.140847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.326957] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f0562087-b1a4-4c40-b71e-52fd7231ac05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.332363] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528889e0-f14a-22c2-0b87-ccb416edbda8, 'name': SearchDatastore_Task, 'duration_secs': 0.025221} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.333571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.334100] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 9d54d4b6-9b92-4a24-9582-475108bf2710/9d54d4b6-9b92-4a24-9582-475108bf2710.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.334203] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-550e8aae-00e9-4e0c-b5c7-a3a944c1d888 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.340017] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1098.340017] env[68437]: value = "task-2944829" [ 1098.340017] env[68437]: _type = "Task" [ 1098.340017] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.349688] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.437159] env[68437]: DEBUG nova.network.neutron [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.507825] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.507825] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acfbcd4f-52fd-4a47-8acd-605a93c95267 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.510401] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.607s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.510685] env[68437]: DEBUG nova.objects.instance [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lazy-loading 'resources' on Instance uuid b81a414d-51bf-4f08-b0d3-a19a7aa4efe5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.526408] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bbddd7-13f0-41af-b9ae-525fa3436a7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.542529] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944827, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.551086] env[68437]: INFO nova.scheduler.client.report [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted allocations for instance 27429c12-ce0a-4e21-ac1b-6862a8063a9f [ 1098.574616] env[68437]: DEBUG nova.compute.manager [req-03dc19f3-7165-4ef3-b5e9-3931f0fd2865 req-070da191-937c-4ecd-953a-2977784e5017 service nova] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Detach interface failed, port_id=9fbfd56e-861b-488a-afc9-9efe25097c73, reason: Instance 19dde8dd-eae6-41a0-b147-c505db1cda15 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1098.643850] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1098.853641] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509299} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.854351] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 9d54d4b6-9b92-4a24-9582-475108bf2710/9d54d4b6-9b92-4a24-9582-475108bf2710.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.854501] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.855829] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5672b32f-edeb-4822-bd20-fad48cacdd89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.861380] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1098.861380] env[68437]: value = "task-2944830" [ 1098.861380] env[68437]: _type = "Task" [ 1098.861380] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.869559] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944830, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.940915] env[68437]: INFO nova.compute.manager [-] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Took 1.57 seconds to deallocate network for instance. [ 1099.026241] env[68437]: DEBUG oslo_vmware.api [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2944827, 'name': PowerOnVM_Task, 'duration_secs': 1.136379} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.026695] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.027391] env[68437]: INFO nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1099.027391] env[68437]: DEBUG nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1099.028516] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549b2fd6-d693-4c31-8b2e-6198e63a7332 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.062064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7d82f5e0-3f85-4d9d-b79b-737e89d813af tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "27429c12-ce0a-4e21-ac1b-6862a8063a9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.411s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.166816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.258407] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d58089e-5444-473e-b4f1-55188d508b51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.266843] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcab9262-feea-4732-b73a-9038d2df7486 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.301183] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb774c2-24b8-4b6e-9e41-5de54b004b5e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.308672] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d8a378-ebcf-43f7-9887-cfc07b45a3f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.322855] env[68437]: DEBUG nova.compute.provider_tree [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 
tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.371268] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086371} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.371560] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.372343] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789a2f77-b14c-4afa-b64b-5c12c2f4e614 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.393945] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 9d54d4b6-9b92-4a24-9582-475108bf2710/9d54d4b6-9b92-4a24-9582-475108bf2710.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.394367] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a7fddf6-235a-4c71-9761-3fe54412461c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.416695] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1099.416695] env[68437]: value = "task-2944831" [ 1099.416695] env[68437]: _type = "Task" [ 1099.416695] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.425795] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944831, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.447206] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.545941] env[68437]: INFO nova.compute.manager [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Took 15.33 seconds to build instance. [ 1099.825351] env[68437]: DEBUG nova.scheduler.client.report [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.929086] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944831, 'name': ReconfigVM_Task, 'duration_secs': 0.315158} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.929325] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 9d54d4b6-9b92-4a24-9582-475108bf2710/9d54d4b6-9b92-4a24-9582-475108bf2710.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.930742] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d565f759-eddc-44ed-bfbc-d93cf936ebaa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.936306] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1099.936306] env[68437]: value = "task-2944832" [ 1099.936306] env[68437]: _type = "Task" [ 1099.936306] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.944521] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944832, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.048017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a9e020c6-63d9-43b3-a625-4749f9902c6c tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.852s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.331186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.333655] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.594s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.333814] env[68437]: DEBUG nova.objects.instance [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lazy-loading 'resources' on Instance uuid ccad008b-0a3a-4234-9c4c-c3a5230a938e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.364288] env[68437]: INFO nova.scheduler.client.report [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Deleted allocations for instance b81a414d-51bf-4f08-b0d3-a19a7aa4efe5 [ 1100.447376] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944832, 'name': Rename_Task, 'duration_secs': 0.219581} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.447376] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.447376] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27072ea1-8caa-4e43-abbf-0b7431e21d6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.453212] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1100.453212] env[68437]: value = "task-2944833" [ 1100.453212] env[68437]: _type = "Task" [ 1100.453212] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.462723] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944833, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.877654] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f622054f-6b0d-420e-bb8c-c8ba368392e8 tempest-ListServersNegativeTestJSON-130245478 tempest-ListServersNegativeTestJSON-130245478-project-member] Lock "b81a414d-51bf-4f08-b0d3-a19a7aa4efe5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.360s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.962065] env[68437]: DEBUG oslo_vmware.api [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944833, 'name': PowerOnVM_Task, 'duration_secs': 0.470756} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.962379] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1100.962572] env[68437]: INFO nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Took 8.91 seconds to spawn the instance on the hypervisor. 
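Editor's note: the recurring "Lock ... acquired ... :: waited Ns" / "Lock ... released ... :: held Ns" pairs above come from the lock wrapper's timing bookkeeping. The snippet below reproduces that bookkeeping with a plain threading.Lock as a rough approximation; it is not the oslo.concurrency lockutils implementation.

```python
import contextlib
import threading
import time

_LOCKS = {}

@contextlib.contextmanager
def timed_lock(name):
    # Approximation of the "acquired :: waited" / "released :: held" accounting
    # seen in the lockutils DEBUG lines; not the oslo.concurrency code.
    lock = _LOCKS.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')

# usage:
# with timed_lock("compute_resources"):
#     ...  # resource-tracker-style critical section
```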
[ 1100.962749] env[68437]: DEBUG nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.963666] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a335752-b65a-434f-ae48-3ce062b1edf5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.123567] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4495dec3-c091-4351-8891-dc5125197597 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.133514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c984a4-5ef6-4d2a-80ae-ab5e0fc66a4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.171493] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597bcf68-21cc-4d40-a0d8-7602de8de640 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.181758] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce65708e-5b9b-43f9-83da-2d64b2023c0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.207786] env[68437]: DEBUG nova.compute.provider_tree [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.490821] env[68437]: INFO nova.compute.manager [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Took 16.30 seconds to build instance. [ 1101.576791] env[68437]: DEBUG nova.compute.manager [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-changed-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1101.576994] env[68437]: DEBUG nova.compute.manager [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing instance network info cache due to event network-changed-a072479e-e965-4e09-a378-229474b176e6. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1101.577849] env[68437]: DEBUG oslo_concurrency.lockutils [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] Acquiring lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.577849] env[68437]: DEBUG oslo_concurrency.lockutils [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] Acquired lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.577849] env[68437]: DEBUG nova.network.neutron [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing network info cache for port a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1101.712663] env[68437]: DEBUG nova.scheduler.client.report [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.971261] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.971857] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.995303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-54c4a651-da83-4a2c-aae9-80456b30e2d6 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.809s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.217676] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.223304] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 
tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.826s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.223304] env[68437]: DEBUG nova.objects.instance [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lazy-loading 'resources' on Instance uuid 8d87308a-5583-4785-9f10-13a6f9b5fe98 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.239892] env[68437]: INFO nova.scheduler.client.report [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Deleted allocations for instance ccad008b-0a3a-4234-9c4c-c3a5230a938e [ 1102.325536] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "9d54d4b6-9b92-4a24-9582-475108bf2710" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.325775] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.326062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.326256] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.326455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.328103] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe 
tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.328352] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.329613] env[68437]: INFO nova.compute.manager [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Terminating instance [ 1102.475436] env[68437]: DEBUG nova.network.neutron [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updated VIF entry in instance network info cache for port a072479e-e965-4e09-a378-229474b176e6. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1102.475436] env[68437]: DEBUG nova.network.neutron [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.481349] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.481552] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task 
ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.481723] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.481920] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.481988] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.482178] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.482268] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1102.482451] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.760820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8f240cc0-fecb-4b28-8738-434aa49b18a1 tempest-ServersTestFqdnHostnames-725388464 tempest-ServersTestFqdnHostnames-725388464-project-member] Lock "ccad008b-0a3a-4234-9c4c-c3a5230a938e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.187s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.833144] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1102.839933] env[68437]: DEBUG nova.compute.manager [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Start destroying the instance on the hypervisor. 
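Editor's note: the req-110d170f entries above show the periodic-task runner walking the ComputeManager tasks one after another, with _reclaim_queued_deletes bailing out because CONF.reclaim_instance_interval <= 0. The following is a minimal sketch of that registry/dispatch pattern in plain Python; it is not oslo.service's implementation and the names are illustrative.

```python
# Minimal sketch of the periodic-task pattern behind the
# "Running periodic task ComputeManager._*" lines above; not oslo.service code.
_PERIODIC_TASKS = []

def periodic_task(fn):
    _PERIODIC_TASKS.append(fn)
    return fn

RECLAIM_INSTANCE_INTERVAL = 0  # stand-in for CONF.reclaim_instance_interval

@periodic_task
def _poll_unconfirmed_resizes():
    pass  # nothing to confirm in this sketch

@periodic_task
def _reclaim_queued_deletes():
    # mirrors "CONF.reclaim_instance_interval <= 0, skipping..."
    if RECLAIM_INSTANCE_INTERVAL <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")

def run_periodic_tasks():
    for task in _PERIODIC_TASKS:
        print(f"Running periodic task {task.__name__}")
        task()

run_periodic_tasks()
```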
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1102.840419] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.842277] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84552f1-988d-4c17-ac41-db673e801305 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.854266] env[68437]: DEBUG nova.compute.manager [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1102.854266] env[68437]: DEBUG nova.compute.manager [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing instance network info cache due to event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1102.854266] env[68437]: DEBUG oslo_concurrency.lockutils [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] Acquiring lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.854445] env[68437]: DEBUG oslo_concurrency.lockutils [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] Acquired lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.854611] env[68437]: DEBUG nova.network.neutron [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing network info cache for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1102.862483] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.863394] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b56167eb-3b33-4e55-b3fd-f76926e7ff2c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.871531] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1102.871531] env[68437]: value = "task-2944834" [ 1102.871531] env[68437]: _type = "Task" [ 1102.871531] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.885099] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944834, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.982296] env[68437]: DEBUG oslo_concurrency.lockutils [req-64a02a5c-c292-4e4a-86e1-5050271909e7 req-ff4f3677-bc85-4a84-94df-5c6055e960d5 service nova] Releasing lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.985430] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.986489] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b07d914-c803-48c6-876d-3c04d12def3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.997982] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42e302-14ab-4887-8ee7-421fee66d453 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.040707] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd46a076-dd50-419d-bdf6-49654042efbb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.050450] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d533fc0e-e391-448e-a9cc-4a79db385ede {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.068712] env[68437]: DEBUG nova.compute.provider_tree [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.359712] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.388140] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944834, 'name': PowerOffVM_Task, 'duration_secs': 0.191872} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.388487] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.388668] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1103.388938] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab55920c-a8a8-4d1c-87ce-7da6cd2e6bfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.474203] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1103.474515] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1103.474694] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Deleting the datastore file [datastore1] 9d54d4b6-9b92-4a24-9582-475108bf2710 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.475051] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c950f1ea-07ac-43b1-9404-a7f0e3ec7bfb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.483602] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for the task: (returnval){ [ 1103.483602] env[68437]: value = "task-2944836" [ 1103.483602] env[68437]: _type = "Task" [ 1103.483602] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.492451] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944836, 'name': DeleteDatastoreFile_Task} progress is 0%. 
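Editor's note: the terminate path above follows a fixed ordering on the vCenter side: power the VM off, unregister it, then delete its directory on the datastore, waiting on each task. The condensed sketch below only illustrates that ordering; `session`, `vm_ref` and `ds_path` are hypothetical stand-ins, and the string method names mirror the vSphere task names in the log rather than any real client API.

```python
# Condensed, hypothetical sketch of the destroy ordering visible above.
def destroy_instance(session, vm_ref, ds_path):
    session.wait_for_task(session.call("PowerOffVM_Task", vm_ref))            # "Powered off the VM"
    session.call("UnregisterVM", vm_ref)                                       # "Unregistered the VM"
    session.wait_for_task(session.call("DeleteDatastoreFile_Task", ds_path))   # "Deleted the datastore file"
```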
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.570725] env[68437]: DEBUG nova.scheduler.client.report [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.575389] env[68437]: DEBUG nova.network.neutron [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updated VIF entry in instance network info cache for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1103.575843] env[68437]: DEBUG nova.network.neutron [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [{"id": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "address": "fa:16:3e:96:2b:b0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e0ca43-56", "ovs_interfaceid": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.615153] env[68437]: DEBUG nova.compute.manager [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1103.615657] env[68437]: DEBUG nova.compute.manager [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing instance network info cache due to event network-changed-94e0ca43-56a9-44cc-b9f1-0fa484701ba2. 
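Editor's note: the neutron cache refresh above dumps a full network_info entry for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2. When reading these blobs it helps to reduce them to the handful of fields that usually matter; the helper below does that for one VIF dict of the shape shown in the log (illustrative only, not Nova code).

```python
def summarize_vif(vif):
    """Condense one network_info entry (shaped like the cache dump above)."""
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed,
        "floating_ips": floating,
        "mtu": vif["network"]["meta"].get("mtu"),
        "vif_type": vif["type"],
        "devname": vif.get("devname"),
    }

# For the entry above this yields port 94e0ca43-..., MAC fa:16:3e:96:2b:b0,
# fixed IP 192.168.128.7, MTU 8950, vif_type "ovs", devname "tap94e0ca43-56".
```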
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1103.615657] env[68437]: DEBUG oslo_concurrency.lockutils [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] Acquiring lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.994021] env[68437]: DEBUG oslo_vmware.api [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Task: {'id': task-2944836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20135} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.994358] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.994358] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1103.996970] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1103.996970] env[68437]: INFO nova.compute.manager [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1103.996970] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
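Editor's note: the looping call above waits on _deallocate_network_with_retries, i.e. the Neutron deallocation is retried a bounded number of times before the compute manager gives up. The helper below is a generic stdlib retry loop in that spirit, not Nova's implementation; deallocate_for_instance in the usage comment is hypothetical here.

```python
import time

def call_with_retries(fn, attempts=3, delay=1.0):
    """Generic bounded-retry helper; an approximation, not Nova's code."""
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:  # sketch only: real code narrows the exception
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {delay}s")
            time.sleep(delay)

# usage (deallocate_for_instance is a hypothetical stand-in):
# call_with_retries(lambda: deallocate_for_instance(context, instance))
```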
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.996970] env[68437]: DEBUG nova.compute.manager [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1103.996970] env[68437]: DEBUG nova.network.neutron [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1104.078581] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.858s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.081306] env[68437]: DEBUG oslo_concurrency.lockutils [req-58905f0f-836b-49b2-b1a7-58dd940d79d2 req-54e30ace-6c78-43e6-9088-827e2fedd84f service nova] Releasing lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.082208] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.916s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.084174] env[68437]: INFO nova.compute.claims [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1104.091605] env[68437]: DEBUG oslo_concurrency.lockutils [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] Acquired lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.091755] env[68437]: DEBUG nova.network.neutron [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Refreshing network info cache for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1104.117431] env[68437]: INFO nova.scheduler.client.report [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Deleted allocations for instance 8d87308a-5583-4785-9f10-13a6f9b5fe98 [ 1104.627108] env[68437]: DEBUG oslo_concurrency.lockutils [None req-09a1fa99-9c5a-4a6c-add2-f774dd4cf728 tempest-ServerMetadataNegativeTestJSON-1986821320 tempest-ServerMetadataNegativeTestJSON-1986821320-project-member] Lock "8d87308a-5583-4785-9f10-13a6f9b5fe98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.640s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.836342] env[68437]: DEBUG nova.network.neutron [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.889218] env[68437]: DEBUG nova.network.neutron [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updated VIF entry in instance network info cache for port 94e0ca43-56a9-44cc-b9f1-0fa484701ba2. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1104.889218] env[68437]: DEBUG nova.network.neutron [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [{"id": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "address": "fa:16:3e:96:2b:b0", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e0ca43-56", "ovs_interfaceid": "94e0ca43-56a9-44cc-b9f1-0fa484701ba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.896817] env[68437]: DEBUG nova.compute.manager [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-changed-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1104.897015] env[68437]: DEBUG nova.compute.manager [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing instance network info cache due to event network-changed-a072479e-e965-4e09-a378-229474b176e6. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1104.897243] env[68437]: DEBUG oslo_concurrency.lockutils [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] Acquiring lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.897390] env[68437]: DEBUG oslo_concurrency.lockutils [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] Acquired lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.897556] env[68437]: DEBUG nova.network.neutron [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Refreshing network info cache for port a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1105.316835] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88468d35-9609-4079-8fa7-19383735d499 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.324956] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e35c3e8-0e3a-4df3-80af-dc757b00df82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.355480] env[68437]: INFO nova.compute.manager [-] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Took 1.36 seconds to deallocate network for instance. [ 1105.357982] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059ba1b1-d3d4-4779-ab9f-ae892b379beb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.368382] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc98b702-6a8a-4fc4-8b3c-76a220c3b6a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.382782] env[68437]: DEBUG nova.compute.provider_tree [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.400325] env[68437]: DEBUG oslo_concurrency.lockutils [req-0941060a-f995-4095-9205-31f96e7a7dae req-3c356f06-95b2-4369-b6b5-ca6bcbe15218 service nova] Releasing lock "refresh_cache-191b441c-2c9f-48f9-b83a-d539722e6375" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.830661] env[68437]: DEBUG nova.network.neutron [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updated VIF entry in instance network info cache for port a072479e-e965-4e09-a378-229474b176e6. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1105.830661] env[68437]: DEBUG nova.network.neutron [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [{"id": "a072479e-e965-4e09-a378-229474b176e6", "address": "fa:16:3e:15:03:44", "network": {"id": "6b32ee7d-bcd2-457d-b9fa-0014ec1c1a88", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2133058254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68baf1daffa842b4adb854fe0cec9524", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa072479e-e9", "ovs_interfaceid": "a072479e-e965-4e09-a378-229474b176e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.865391] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1105.889208] env[68437]: DEBUG nova.scheduler.client.report [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.333332] env[68437]: DEBUG oslo_concurrency.lockutils [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] Releasing lock "refresh_cache-e2143e07-8c8d-4008-bb73-29aae91baee7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.333614] env[68437]: DEBUG nova.compute.manager [req-0ca95fba-f9f5-40a3-a02a-f9432c55c585 req-1dd83189-54be-4e24-9991-a568ed676182 service nova] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Received event network-vif-deleted-9e63795b-1911-4125-b17f-8b478af321a0 {{(pid=68437) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 1106.393838] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.394426] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1106.401040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.954s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.401318] env[68437]: DEBUG nova.objects.instance [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lazy-loading 'resources' on Instance uuid 19dde8dd-eae6-41a0-b147-c505db1cda15 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.903791] env[68437]: DEBUG nova.compute.utils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1106.907495] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1106.907495] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1106.951356] env[68437]: DEBUG nova.policy [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6801cab23bf4aadb8d7f326f0643c32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73e8290afeb84bf3976cfa22d3452ca7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1107.117303] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9998fc7-fb64-44e5-9477-855cf0e083a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.126620] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a95a30a-d720-4fa3-a0ed-ab429a731fea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.162353] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9af7e0-7a61-4c9a-8355-c3bc3091154d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.171736] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ddb655-07b0-43dd-9704-d2f2deb7f92a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.189177] env[68437]: DEBUG nova.compute.provider_tree [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.339119] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Successfully created port: d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.409684] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Start building block device mappings for instance. 
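Editor's note: allocate_for_instance() above culminates in "Successfully created port: d8383639-...", which at the REST level is a POST to Neutron's ports API. The example below shows such a call with the requests library against the documented /v2.0/ports endpoint; the endpoint URL, token and network UUID are placeholders, not values from this deployment, and Nova itself goes through its Neutron client rather than raw HTTP.

```python
import requests

# Hedged illustration of a Neutron port create; URL, token and network_id are
# placeholders. Body shape follows the documented POST /v2.0/ports API.
NEUTRON = "http://controller:9696"
TOKEN = "<keystone-token>"

body = {
    "port": {
        "network_id": "<network-uuid>",
        "admin_state_up": True,
    }
}
resp = requests.post(f"{NEUTRON}/v2.0/ports",
                     json=body,
                     headers={"X-Auth-Token": TOKEN},
                     timeout=30)
resp.raise_for_status()
print("created port", resp.json()["port"]["id"])
```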
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1107.697294] env[68437]: DEBUG nova.scheduler.client.report [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.202711] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.205388] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.220s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.205493] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.205617] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1108.206043] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.846s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.207498] env[68437]: INFO nova.compute.claims [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.214307] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25dae69-2eb8-4d5f-ab22-337e888f9fe2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.228332] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2b67be10-1e88-4a0e-9794-1c2121ed150d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.232277] env[68437]: INFO nova.scheduler.client.report [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Deleted allocations for instance 19dde8dd-eae6-41a0-b147-c505db1cda15 [ 1108.248719] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af1ef5a-4887-470b-963b-e4adbae460cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.258249] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fb5304-c616-489b-9bfb-292b3482d466 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.291081] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179420MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1108.291081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.423342] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1108.449589] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1108.449589] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.449589] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1108.449589] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.449589] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1108.449948] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1108.449948] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1108.450075] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1108.450673] 
env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1108.450673] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1108.450673] env[68437]: DEBUG nova.virt.hardware [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1108.451550] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8eea6f-c9c3-4b41-9936-23b979fa7caa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.463203] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c81a1a-37dd-4e8a-b675-e66f86c765a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.577454] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1108.578530] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb878ce2-4bca-43f5-af5a-77ee88c5dff2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.585917] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1108.586191] env[68437]: ERROR oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk due to incomplete transfer. 
[ 1108.586469] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b1f8a039-dd32-4888-82d3-e0028a37d1b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.594594] env[68437]: DEBUG oslo_vmware.rw_handles [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c5f19-760d-f038-d7cb-aabc9464f3f2/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1108.594825] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Uploaded image 7e2bf87d-fe8a-4380-a83e-2ec60b802467 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1108.597134] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1108.597398] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b99a6c1a-45ad-4dd6-bf02-533a9abae057 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.604326] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1108.604326] env[68437]: value = "task-2944837" [ 1108.604326] env[68437]: _type = "Task" [ 1108.604326] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.616066] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944837, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.755821] env[68437]: DEBUG oslo_concurrency.lockutils [None req-533ca922-c8e9-4285-ac11-8c75a87886cb tempest-ServerRescueNegativeTestJSON-1750851624 tempest-ServerRescueNegativeTestJSON-1750851624-project-member] Lock "19dde8dd-eae6-41a0-b147-c505db1cda15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.033s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.984967] env[68437]: DEBUG nova.compute.manager [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Received event network-vif-plugged-d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1108.985227] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] Acquiring lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.985302] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.985472] env[68437]: DEBUG oslo_concurrency.lockutils [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.986817] env[68437]: DEBUG nova.compute.manager [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] No waiting events found dispatching network-vif-plugged-d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1108.986917] env[68437]: WARNING nova.compute.manager [req-6e5db0fd-18c1-4b32-919b-298f282fe1ab req-a740e907-08ca-4b89-ada0-e78833f39749 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Received unexpected event network-vif-plugged-d8383639-3dc6-429a-84bb-d34c1c98e26d for instance with vm_state building and task_state spawning. 
[ 1109.077548] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Successfully updated port: d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1109.118127] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944837, 'name': Destroy_Task, 'duration_secs': 0.358494} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.118127] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Destroyed the VM [ 1109.118127] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1109.118127] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a6eee307-9d21-4bac-9f34-f7617bed8e23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.122404] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1109.122404] env[68437]: value = "task-2944838" [ 1109.122404] env[68437]: _type = "Task" [ 1109.122404] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.130188] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944838, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.388651] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.388900] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.435935] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eab4bb-d984-4f92-a7c2-16019339fe4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.445709] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081418bf-1351-4b87-92ff-45b463a0dbbe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.476348] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b3fa26c-f225-4564-b28b-37bdbb9de3de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.483853] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17a6f9e-4da3-4030-9592-bb35f10d8bc3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.499062] env[68437]: DEBUG nova.compute.provider_tree [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.585635] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.585635] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.585635] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 
ea09a88a-d426-4af4-aa07-945ccfbf2a24] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1109.636370] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944838, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357968} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.636740] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1109.637067] env[68437]: DEBUG nova.compute.manager [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1109.637922] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f3f772-2377-4531-98a5-ae9d95c8bb95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.890903] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1110.002332] env[68437]: DEBUG nova.scheduler.client.report [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.138503] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1110.153603] env[68437]: INFO nova.compute.manager [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Shelve offloading [ 1110.287592] env[68437]: DEBUG nova.network.neutron [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updating instance_info_cache with network_info: [{"id": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "address": "fa:16:3e:50:fe:b5", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8383639-3d", "ovs_interfaceid": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.424470] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.508163] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.509374] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.515046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.649s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.515442] env[68437]: DEBUG nova.objects.instance [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lazy-loading 'resources' on Instance uuid 9d54d4b6-9b92-4a24-9582-475108bf2710 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.663129] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1110.663983] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17a7b30a-5e68-4dc7-aaea-c81e7a89becb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.671739] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1110.671739] env[68437]: value = "task-2944842" [ 1110.671739] env[68437]: _type = "Task" [ 1110.671739] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.683681] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1110.683681] env[68437]: DEBUG nova.compute.manager [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1110.685216] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60be1b43-b978-4f55-ab02-dca5fcfdc41b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.692626] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.692838] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.692979] env[68437]: DEBUG nova.network.neutron [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1110.792233] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.792800] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Instance network_info: |[{"id": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "address": "fa:16:3e:50:fe:b5", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8383639-3d", "ovs_interfaceid": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1110.793325] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:fe:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd8383639-3dc6-429a-84bb-d34c1c98e26d', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.801629] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.801862] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.802124] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24afee02-c467-48fd-b753-7e24bd57b11d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.823111] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.823111] env[68437]: value = "task-2944843" [ 1110.823111] env[68437]: _type = "Task" [ 1110.823111] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.833536] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944843, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.019262] env[68437]: DEBUG nova.compute.utils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.023928] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.024117] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1111.035606] env[68437]: DEBUG nova.compute.manager [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Received event network-changed-d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1111.035870] env[68437]: DEBUG nova.compute.manager [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Refreshing instance network info cache due to event network-changed-d8383639-3dc6-429a-84bb-d34c1c98e26d. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1111.036183] env[68437]: DEBUG oslo_concurrency.lockutils [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] Acquiring lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.036361] env[68437]: DEBUG oslo_concurrency.lockutils [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] Acquired lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.036568] env[68437]: DEBUG nova.network.neutron [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Refreshing network info cache for port d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1111.088574] env[68437]: DEBUG nova.policy [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35e8e347bb7e4e648e056282230d9f62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1971ad9ba5bd4d50a68495c8b2ab7341', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.212009] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735514d1-8b20-4fd3-a9f7-b1701a288be4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.219423] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f14d5b-d82e-4edd-8573-d8d1cb748be0 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.251303] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb6991e-3e2b-41f3-b3ec-385b6c1e3e97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.260034] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf482ea-2bd3-4c47-9cf5-136ef31c613a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.272750] env[68437]: DEBUG nova.compute.provider_tree [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.333710] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944843, 'name': CreateVM_Task, 'duration_secs': 0.355283} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.336601] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.337293] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.337481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.337831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1111.338333] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f72e7b4d-567d-4cd8-9f81-12c7dcb95b9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.343425] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1111.343425] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524a8c96-fb54-55bd-6c52-2941c43c0dd5" [ 1111.343425] env[68437]: _type = "Task" [ 1111.343425] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.351229] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a8c96-fb54-55bd-6c52-2941c43c0dd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.352016] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Successfully created port: 94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1111.431593] env[68437]: DEBUG nova.network.neutron [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updating instance_info_cache with network_info: [{"id": "63b32499-5908-4ff2-8386-9e979aee59c9", "address": "fa:16:3e:3b:f7:4a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b32499-59", "ovs_interfaceid": "63b32499-5908-4ff2-8386-9e979aee59c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.525253] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1111.751648] env[68437]: DEBUG nova.network.neutron [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updated VIF entry in instance network info cache for port d8383639-3dc6-429a-84bb-d34c1c98e26d. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1111.752099] env[68437]: DEBUG nova.network.neutron [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updating instance_info_cache with network_info: [{"id": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "address": "fa:16:3e:50:fe:b5", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8383639-3d", "ovs_interfaceid": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.776568] env[68437]: DEBUG nova.scheduler.client.report [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1111.854160] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524a8c96-fb54-55bd-6c52-2941c43c0dd5, 'name': SearchDatastore_Task, 'duration_secs': 0.009687} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.854490] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.854739] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.854971] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.855127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.855308] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.855561] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca209607-f1c0-4b18-9486-81e71408eda1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.865025] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.865025] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.866410] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42ac1c75-4357-41ad-9184-032c6f695b6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.871278] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1111.871278] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac12ae-67dd-8076-58c9-5f2721c48ebf" [ 1111.871278] env[68437]: _type = "Task" [ 1111.871278] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.878514] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac12ae-67dd-8076-58c9-5f2721c48ebf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.934817] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.255042] env[68437]: DEBUG oslo_concurrency.lockutils [req-37618396-376a-4fd3-a4cb-b794689375ca req-cab94675-774a-420d-ac89-dfee28c3fc9f service nova] Releasing lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.281111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.283721] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.993s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.312756] env[68437]: INFO nova.scheduler.client.report [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Deleted allocations for instance 9d54d4b6-9b92-4a24-9582-475108bf2710 [ 1112.347462] env[68437]: DEBUG nova.compute.manager [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received event network-vif-unplugged-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1112.347462] env[68437]: DEBUG 
oslo_concurrency.lockutils [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.347462] env[68437]: DEBUG oslo_concurrency.lockutils [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.347462] env[68437]: DEBUG oslo_concurrency.lockutils [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.347462] env[68437]: DEBUG nova.compute.manager [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] No waiting events found dispatching network-vif-unplugged-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1112.347462] env[68437]: WARNING nova.compute.manager [req-a10bcf58-8301-4c52-bfac-d577f401424c req-88098669-ba5c-42d2-a298-56b7ef32a979 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received unexpected event network-vif-unplugged-63b32499-5908-4ff2-8386-9e979aee59c9 for instance with vm_state shelved and task_state shelving_offloading. [ 1112.382708] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac12ae-67dd-8076-58c9-5f2721c48ebf, 'name': SearchDatastore_Task, 'duration_secs': 0.009578} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.383530] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bb8fa89-ebf0-47fe-a3df-43122faec627 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.389589] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1112.389589] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520c8698-ed45-d533-f173-a0fe15465d65" [ 1112.389589] env[68437]: _type = "Task" [ 1112.389589] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.396956] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520c8698-ed45-d533-f173-a0fe15465d65, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.408438] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.409185] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f3186d-3a58-45f2-b12b-b9c742507db7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.416418] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.417168] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b32f5046-109e-43fa-9702-80d6c3a7ff7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.485517] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.485735] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.485907] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore2] 2b985ca2-f0d1-4937-aa33-154aa53b0a40 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.486197] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14cdc262-dd0b-4073-adca-7aac3bcf349f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.493444] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1112.493444] env[68437]: value = "task-2944845" [ 1112.493444] env[68437]: _type = "Task" [ 1112.493444] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.501354] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944845, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.539983] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1112.572220] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1112.572636] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.572770] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1112.573018] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.573227] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1112.573449] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1112.573718] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 
tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1112.573949] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1112.574195] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1112.574413] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1112.574674] env[68437]: DEBUG nova.virt.hardware [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1112.575697] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06435a24-aef9-4206-903b-754a0500bb19 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.585050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a0bb4a-7f13-403e-b00a-31f8ad6a57b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.823450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c53dbba1-ebf3-4813-87d7-42d5bfa3b224 tempest-ServersNegativeTestMultiTenantJSON-342835407 tempest-ServersNegativeTestMultiTenantJSON-342835407-project-member] Lock "9d54d4b6-9b92-4a24-9582-475108bf2710" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.497s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.909276] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520c8698-ed45-d533-f173-a0fe15465d65, 'name': SearchDatastore_Task, 'duration_secs': 0.011088} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.909571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.909890] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ea09a88a-d426-4af4-aa07-945ccfbf2a24/ea09a88a-d426-4af4-aa07-945ccfbf2a24.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.910182] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a34417e-b610-43d1-b26f-2c1648ba4865 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.917164] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1112.917164] env[68437]: value = "task-2944846" [ 1112.917164] env[68437]: _type = "Task" [ 1112.917164] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.927274] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944846, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.004846] env[68437]: DEBUG oslo_vmware.api [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139663} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.005167] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.005559] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.005859] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.029029] env[68437]: INFO nova.scheduler.client.report [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 2b985ca2-f0d1-4937-aa33-154aa53b0a40 [ 1113.063715] env[68437]: DEBUG nova.compute.manager [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received event network-vif-plugged-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1113.064016] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] Acquiring lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.064495] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.064672] env[68437]: DEBUG oslo_concurrency.lockutils [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.064850] env[68437]: DEBUG nova.compute.manager [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] No waiting events found dispatching network-vif-plugged-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1113.065067] env[68437]: WARNING nova.compute.manager [req-7ae08c41-1c8b-4567-b0e3-71806dde13e2 req-ab6444bb-dd6d-4b85-a772-7fe058b49da1 service nova] [instance: 
cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received unexpected event network-vif-plugged-94228d13-e4c8-47f2-9bfa-37e85949fe81 for instance with vm_state building and task_state spawning. [ 1113.106491] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Successfully updated port: 94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e2143e07-8c8d-4008-bb73-29aae91baee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 76d97a56-21a2-4363-a987-ef872f056510 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 4abf1477-2f0e-4a13-884a-c19420b3e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 191b441c-2c9f-48f9-b83a-d539722e6375 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ea09a88a-d426-4af4-aa07-945ccfbf2a24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.327098] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance cdb5b8d0-03ab-4020-a9aa-00688f7aef8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1113.428736] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50021} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.429273] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ea09a88a-d426-4af4-aa07-945ccfbf2a24/ea09a88a-d426-4af4-aa07-945ccfbf2a24.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.429529] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.429876] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d7c1dbf-d012-4ce0-a866-a4874e775593 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.439739] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1113.439739] env[68437]: value = "task-2944847" [ 1113.439739] env[68437]: _type = "Task" [ 1113.439739] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.448720] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944847, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.535219] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.613641] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.613952] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquired lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.613952] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1113.830427] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance aff861ed-e792-480a-811e-c157c0606d08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1113.830701] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1113.830896] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1113.949603] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944847, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10083} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.952297] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.953675] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477b1a5e-7655-4b15-81d8-a561d24fa7da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.977061] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] ea09a88a-d426-4af4-aa07-945ccfbf2a24/ea09a88a-d426-4af4-aa07-945ccfbf2a24.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.980167] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77048e85-3b73-4dda-af49-93b5bd9d74a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.004273] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1114.004273] env[68437]: value = "task-2944849" [ 1114.004273] env[68437]: _type = "Task" [ 1114.004273] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.016608] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944849, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.028330] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d370d38c-820b-4a78-b6be-dda5cd05c23b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.036594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8589c461-8452-445b-a4cf-c3c2485b4cd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.067921] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009879a7-177e-4895-8fb6-6ddfe3439e56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.076496] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3727f4-8bfe-41e7-b786-e82122d3d270 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.090445] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.198741] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1114.486623] env[68437]: DEBUG nova.network.neutron [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [{"id": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "address": "fa:16:3e:cb:16:0c", "network": {"id": "a62e1a24-0149-4f49-9572-b9b3f0a73a40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-912812525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1971ad9ba5bd4d50a68495c8b2ab7341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94228d13-e4", "ovs_interfaceid": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.516282] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944849, 'name': ReconfigVM_Task, 'duration_secs': 0.412322} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.516596] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Reconfigured VM instance instance-00000060 to attach disk [datastore1] ea09a88a-d426-4af4-aa07-945ccfbf2a24/ea09a88a-d426-4af4-aa07-945ccfbf2a24.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.517264] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3ce76bf-bbec-47ac-9df9-63e9851eb8ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.525280] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1114.525280] env[68437]: value = "task-2944850" [ 1114.525280] env[68437]: _type = "Task" [ 1114.525280] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.536646] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944850, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.562440] env[68437]: DEBUG nova.compute.manager [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Received event network-changed-63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1114.563383] env[68437]: DEBUG nova.compute.manager [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Refreshing instance network info cache due to event network-changed-63b32499-5908-4ff2-8386-9e979aee59c9. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1114.563383] env[68437]: DEBUG oslo_concurrency.lockutils [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] Acquiring lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.563383] env[68437]: DEBUG oslo_concurrency.lockutils [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] Acquired lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.563963] env[68437]: DEBUG nova.network.neutron [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Refreshing network info cache for port 63b32499-5908-4ff2-8386-9e979aee59c9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1114.595052] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.991715] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Releasing lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.992094] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: 
cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Instance network_info: |[{"id": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "address": "fa:16:3e:cb:16:0c", "network": {"id": "a62e1a24-0149-4f49-9572-b9b3f0a73a40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-912812525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1971ad9ba5bd4d50a68495c8b2ab7341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94228d13-e4", "ovs_interfaceid": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1114.992599] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:16:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db68bd64-5b56-49af-a075-13dcf85cb2e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94228d13-e4c8-47f2-9bfa-37e85949fe81', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1115.004955] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Creating folder: Project (1971ad9ba5bd4d50a68495c8b2ab7341). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1115.005271] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ba1f73a-09df-4307-8718-30ff6a2016ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.016849] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Created folder: Project (1971ad9ba5bd4d50a68495c8b2ab7341) in parent group-v590848. [ 1115.017066] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Creating folder: Instances. Parent ref: group-v591109. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1115.017308] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a83b301-e69d-48a3-b7db-d508de62da04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.026369] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Created folder: Instances in parent group-v591109. [ 1115.026369] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1115.032016] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1115.032016] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc2111a6-d874-447d-a88b-5dbb292053df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.049061] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944850, 'name': Rename_Task, 'duration_secs': 0.154227} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.050041] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.050228] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1115.050228] env[68437]: value = "task-2944853" [ 1115.050228] env[68437]: _type = "Task" [ 1115.050228] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.050405] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4741fe81-70a3-4c67-a140-10570a364c23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.060196] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944853, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.061557] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.061869] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1115.061869] env[68437]: value = "task-2944854" [ 1115.061869] env[68437]: _type = "Task" [ 1115.061869] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.071279] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.101202] env[68437]: DEBUG nova.compute.manager [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1115.101767] env[68437]: DEBUG nova.compute.manager [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing instance network info cache due to event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1115.102123] env[68437]: DEBUG oslo_concurrency.lockutils [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] Acquiring lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.102397] env[68437]: DEBUG oslo_concurrency.lockutils [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] Acquired lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.102668] env[68437]: DEBUG nova.network.neutron [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1115.105218] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1115.105626] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.822s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.105960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.682s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.108062] env[68437]: INFO nova.compute.claims [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1115.201198] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.201630] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.351172] env[68437]: DEBUG nova.network.neutron [req-18db477e-adec-4292-89c1-03d56f00061f 
req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updated VIF entry in instance network info cache for port 63b32499-5908-4ff2-8386-9e979aee59c9. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1115.351172] env[68437]: DEBUG nova.network.neutron [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updating instance_info_cache with network_info: [{"id": "63b32499-5908-4ff2-8386-9e979aee59c9", "address": "fa:16:3e:3b:f7:4a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": null, "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap63b32499-59", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.563122] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944853, 'name': CreateVM_Task, 'duration_secs': 0.360298} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.568045] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1115.568045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.568045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.568307] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1115.568911] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af146897-40df-456c-8f60-e6aa5a6cace7 {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.576252] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944854, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.580824] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1115.580824] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521397d5-0d6f-67e7-b585-ec13da5577c2" [ 1115.580824] env[68437]: _type = "Task" [ 1115.580824] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.589703] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521397d5-0d6f-67e7-b585-ec13da5577c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.704584] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1115.857344] env[68437]: DEBUG oslo_concurrency.lockutils [req-18db477e-adec-4292-89c1-03d56f00061f req-c3f19bcb-60aa-446a-98f1-4aa54c875a13 service nova] Releasing lock "refresh_cache-2b985ca2-f0d1-4937-aa33-154aa53b0a40" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.943730] env[68437]: DEBUG nova.network.neutron [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updated VIF entry in instance network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1115.944594] env[68437]: DEBUG nova.network.neutron [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [{"id": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "address": "fa:16:3e:cb:16:0c", "network": {"id": "a62e1a24-0149-4f49-9572-b9b3f0a73a40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-912812525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1971ad9ba5bd4d50a68495c8b2ab7341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94228d13-e4", "ovs_interfaceid": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.074535] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944854, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.091445] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521397d5-0d6f-67e7-b585-ec13da5577c2, 'name': SearchDatastore_Task, 'duration_secs': 0.010599} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.091882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.092205] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.092434] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.092921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.092921] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1116.093103] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5899f2ae-3882-4606-b80c-9da2e9fc4f7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.110896] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1116.111118] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1116.111921] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c619455-69c4-4745-a4d8-ab0754a6cbe7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.117598] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1116.117598] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c6da82-2a02-99f8-535d-306bae8200df" [ 1116.117598] env[68437]: _type = "Task" [ 1116.117598] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.129264] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c6da82-2a02-99f8-535d-306bae8200df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.226110] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.309963] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489a9e9c-05fa-479e-a678-0d433081d6bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.318285] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bc9b87-f91a-40a9-bc38-629c1a40ec51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.347611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c995228-a207-4173-a514-d4dd56b8c300 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.355611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52e3492-a719-4e78-a09a-5a3c2a018b41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.370971] env[68437]: DEBUG nova.compute.provider_tree [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.449033] env[68437]: DEBUG oslo_concurrency.lockutils [req-32d77087-5556-4eee-a5dc-411b488f8b95 req-308a080c-9ee7-47c5-8c85-0251a28259e1 service nova] Releasing lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.574525] env[68437]: DEBUG oslo_vmware.api [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944854, 'name': PowerOnVM_Task, 'duration_secs': 1.068648} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.574947] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1116.575152] env[68437]: INFO nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1116.575288] env[68437]: DEBUG nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.576252] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efab95ef-f520-4176-a79d-b4225920fcbc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.629113] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c6da82-2a02-99f8-535d-306bae8200df, 'name': SearchDatastore_Task, 'duration_secs': 0.012163} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.629600] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9872a144-53be-439f-9dc1-974dd49fe97f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.635200] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1116.635200] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52974214-a5a2-5378-03f8-4104a549c81e" [ 1116.635200] env[68437]: _type = "Task" [ 1116.635200] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.645442] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52974214-a5a2-5378-03f8-4104a549c81e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.875387] env[68437]: DEBUG nova.scheduler.client.report [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1117.093365] env[68437]: INFO nova.compute.manager [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Took 17.94 seconds to build instance. [ 1117.147050] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52974214-a5a2-5378-03f8-4104a549c81e, 'name': SearchDatastore_Task, 'duration_secs': 0.013295} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.147256] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.147516] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] cdb5b8d0-03ab-4020-a9aa-00688f7aef8e/cdb5b8d0-03ab-4020-a9aa-00688f7aef8e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1117.147761] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-834f6507-c77d-4300-897e-42d1aca2b04e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.155122] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1117.155122] env[68437]: value = "task-2944856" [ 1117.155122] env[68437]: _type = "Task" [ 1117.155122] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.162381] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.381672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.275s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.381860] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1117.384506] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.851s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.384741] env[68437]: DEBUG nova.objects.instance [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'resources' on Instance uuid 2b985ca2-f0d1-4937-aa33-154aa53b0a40 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.521020] env[68437]: DEBUG nova.compute.manager [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Received event network-changed-d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1117.521313] env[68437]: DEBUG nova.compute.manager [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Refreshing instance network info cache due to event network-changed-d8383639-3dc6-429a-84bb-d34c1c98e26d. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1117.521560] env[68437]: DEBUG oslo_concurrency.lockutils [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] Acquiring lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.521754] env[68437]: DEBUG oslo_concurrency.lockutils [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] Acquired lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.521974] env[68437]: DEBUG nova.network.neutron [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Refreshing network info cache for port d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1117.628670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5c9ca73a-bbc2-4935-b2e5-5148a5553fed tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.455s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.667677] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944856, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.888481] env[68437]: DEBUG nova.compute.utils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1117.890412] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1117.890604] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1117.892786] env[68437]: DEBUG nova.objects.instance [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'numa_topology' on Instance uuid 2b985ca2-f0d1-4937-aa33-154aa53b0a40 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.951133] env[68437]: DEBUG nova.policy [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44a64439ac8d41239fad856a83a02e1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e28f7fd8c8d412f8c9e1624c55d6604', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1118.169548] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944856, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53704} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.169839] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] cdb5b8d0-03ab-4020-a9aa-00688f7aef8e/cdb5b8d0-03ab-4020-a9aa-00688f7aef8e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1118.169839] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.170109] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70af9afc-a351-4d71-be76-d6b0d717758f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.176751] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1118.176751] env[68437]: value = "task-2944858" [ 1118.176751] env[68437]: _type = "Task" [ 1118.176751] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.189111] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.246917] env[68437]: DEBUG nova.network.neutron [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updated VIF entry in instance network info cache for port d8383639-3dc6-429a-84bb-d34c1c98e26d. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1118.247323] env[68437]: DEBUG nova.network.neutron [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updating instance_info_cache with network_info: [{"id": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "address": "fa:16:3e:50:fe:b5", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd8383639-3d", "ovs_interfaceid": "d8383639-3dc6-429a-84bb-d34c1c98e26d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.336882] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Successfully created port: 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1118.395309] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1118.398086] env[68437]: DEBUG nova.objects.base [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Object Instance<2b985ca2-f0d1-4937-aa33-154aa53b0a40> lazy-loaded attributes: resources,numa_topology {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1118.565850] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410792b3-658c-4c31-93e3-76cbdede9f0e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.573752] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2dc98f-fb0d-4c35-970a-d903794a0264 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.609088] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241cbf86-37d4-4ccf-be85-7b1384cb7cca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.616895] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150a0298-5c1f-4e01-b67e-9cd95931d70b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.631554] env[68437]: DEBUG nova.compute.provider_tree [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.687542] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06929} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.687811] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1118.688565] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd3658d-5f4b-44e8-81db-f5a3fefd9d90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.709454] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] cdb5b8d0-03ab-4020-a9aa-00688f7aef8e/cdb5b8d0-03ab-4020-a9aa-00688f7aef8e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.709981] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4936997d-b22f-4910-925e-5053957802c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.728171] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1118.728171] env[68437]: value = "task-2944859" [ 1118.728171] env[68437]: _type = "Task" [ 1118.728171] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.735744] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944859, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.750478] env[68437]: DEBUG oslo_concurrency.lockutils [req-55dc478d-1d5e-480f-9090-7bfe1814eceb req-192619e4-4e23-4414-b1a9-2e071ec41356 service nova] Releasing lock "refresh_cache-ea09a88a-d426-4af4-aa07-945ccfbf2a24" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.134862] env[68437]: DEBUG nova.scheduler.client.report [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.238448] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944859, 'name': ReconfigVM_Task, 'duration_secs': 0.338595} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.239414] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Reconfigured VM instance instance-00000061 to attach disk [datastore1] cdb5b8d0-03ab-4020-a9aa-00688f7aef8e/cdb5b8d0-03ab-4020-a9aa-00688f7aef8e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1119.239414] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79e9a83c-c491-4669-a33a-192c6b1de9a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.245920] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1119.245920] env[68437]: value = "task-2944860" [ 1119.245920] env[68437]: _type = "Task" [ 1119.245920] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.254674] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944860, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.410113] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1119.436591] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1119.436811] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1119.436969] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1119.437171] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1119.437353] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1119.437535] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1119.437786] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1119.437965] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1119.438163] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1119.438330] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1119.438505] env[68437]: DEBUG nova.virt.hardware [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1119.439372] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d104cd3d-993d-4aa8-a46d-dff8fb7dae74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.447884] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665011c5-056c-446a-82ce-9d4399691c7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.640838] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.256s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.643364] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.417s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.644797] env[68437]: INFO nova.compute.claims [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1119.757220] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944860, 'name': Rename_Task, 'duration_secs': 0.18731} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.757577] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1119.758132] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acfd9dad-468c-4dbb-b259-4b5d54f1f586 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.766115] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1119.766115] env[68437]: value = "task-2944862" [ 1119.766115] env[68437]: _type = "Task" [ 1119.766115] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.772419] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.785279] env[68437]: DEBUG nova.compute.manager [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1119.785279] env[68437]: DEBUG oslo_concurrency.lockutils [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.785279] env[68437]: DEBUG oslo_concurrency.lockutils [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.785279] env[68437]: DEBUG oslo_concurrency.lockutils [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.785279] env[68437]: DEBUG nova.compute.manager [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] No waiting events found dispatching network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1119.785279] env[68437]: WARNING nova.compute.manager [req-c52a7da4-107b-47b4-841f-b67e850d8e63 req-3d31a7db-b0ea-4046-9ae2-f7652259e2d8 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received unexpected event network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 for instance with vm_state building and task_state spawning. [ 1119.872478] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Successfully updated port: 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1120.154078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-23e6239e-32e5-4232-9d6c-a02a63a6c1db tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.876s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.154904] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.093s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.155165] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.155360] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.155528] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.158705] env[68437]: INFO nova.compute.manager [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Terminating instance [ 1120.274827] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 
tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944862, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.374976] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.374976] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.374976] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1120.664358] env[68437]: DEBUG nova.compute.manager [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1120.664665] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.664984] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18946645-c9d8-4938-a585-776bd0656500 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.674527] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff36da8-8562-46e3-b199-10f7a91b5723 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.707762] env[68437]: WARNING nova.virt.vmwareapi.vmops [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b985ca2-f0d1-4937-aa33-154aa53b0a40 could not be found. 
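The WARNING just above is the expected path when deleting an instance whose backing VM is already gone (here a shelved instance): vmops raises InstanceNotFound, the driver logs it, and the manager proceeds to network deallocation anyway. The following standalone Python sketch mimics that best-effort teardown pattern; it is illustrative only, not Nova's code, and the names InstanceNotFound, find_vm, destroy_instance and deallocate_network are stand-ins.

# Illustrative sketch: a missing backend VM is logged and skipped so
# network/resource cleanup still runs, mirroring the WARNING in the log.
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("teardown-sketch")


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def find_vm(backend, uuid):
    # Stand-in lookup; the real driver searches vCenter by instance UUID.
    try:
        return backend[uuid]
    except KeyError:
        raise InstanceNotFound(f"Instance {uuid} could not be found.")


def deallocate_network(uuid):
    LOG.debug("Deallocating network for instance %s", uuid)


def destroy_instance(backend, uuid):
    try:
        vm = find_vm(backend, uuid)
        backend.pop(uuid)
        LOG.debug("Destroyed VM %s", vm)
    except InstanceNotFound as exc:
        # Missing on the backend is not fatal during delete.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")
    deallocate_network(uuid)


if __name__ == "__main__":
    destroy_instance({}, "2b985ca2-f0d1-4937-aa33-154aa53b0a40")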
[ 1120.707979] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1120.708149] env[68437]: INFO nova.compute.manager [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1120.708388] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1120.708626] env[68437]: DEBUG nova.compute.manager [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1120.708720] env[68437]: DEBUG nova.network.neutron [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1120.776057] env[68437]: DEBUG oslo_vmware.api [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944862, 'name': PowerOnVM_Task, 'duration_secs': 0.670694} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.776057] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1120.776235] env[68437]: INFO nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Took 8.24 seconds to spawn the instance on the hypervisor. 
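Most of the oslo_vmware.api traffic in these records is the same wait-for-task loop: submit a vCenter task (CopyVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task), poll its progress at an interval, and return once it reports success, at which point the completion record carries a 'duration_secs'. The sketch below shows that polling pattern in isolation under assumed names; poll_task, TaskFailed and the fake task are illustrative and are not the oslo.vmware API.

# Illustrative polling loop in the spirit of the wait_for_task records:
# poll a task handle until it succeeds, fails, or times out, logging progress.
import time


class TaskFailed(Exception):
    pass


def poll_task(get_task_info, interval=0.5, timeout=60.0):
    """Poll get_task_info() until the task succeeds, fails, or times out."""
    start = time.monotonic()
    while True:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 66}
        if info["state"] == "success":
            return time.monotonic() - start          # analogous to 'duration_secs'
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task progress is {info.get('progress', 0)}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake task that advances 33% per poll, standing in for PowerOnVM_Task.
    progress = {"value": 0}

    def fake_task_info():
        progress["value"] = min(progress["value"] + 33, 100)
        state = "success" if progress["value"] >= 100 else "running"
        return {"state": state, "progress": progress["value"]}

    duration = poll_task(fake_task_info, interval=0.1)
    print(f"completed successfully in {duration:.3f}s")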
[ 1120.776565] env[68437]: DEBUG nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.777140] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f85ab-1f43-4837-bf98-204cf19e027d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.819095] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0b189e-c92a-4ab8-95a6-78f17c1e4da1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.827122] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df54507-d061-49f9-946a-c66f02ca6492 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.861042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fce5136-9329-4c83-9478-0b48b233e719 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.868347] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cf6209-6cd0-4f08-9b8a-a9715b2beaca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.885208] env[68437]: DEBUG nova.compute.provider_tree [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.918966] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1121.044512] env[68437]: DEBUG nova.network.neutron [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.294594] env[68437]: INFO nova.compute.manager [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Took 17.95 seconds to build instance. 
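The instance_info_cache payloads logged above are lists of VIF dicts: port id, MAC address, devname, and a nested network with subnets, fixed IPs and any floating IPs, plus OVS binding details. The short sketch below pulls the usual fields out of one such entry; the sample data is abridged from the aff861ed record above and the helper name summarize_vifs is illustrative, not a Nova API.

# Illustrative walk over a Nova-style network_info cache entry to list
# each VIF's device name, MAC, fixed IPs and floating IPs.
SAMPLE_NETWORK_INFO = [{
    "id": "5f058ce1-be0f-4b97-be84-11302a668781",
    "address": "fa:16:3e:4d:8c:0f",
    "devname": "tap5f058ce1-be",
    "network": {
        "label": "tempest-AttachVolumeShelveTestJSON-922241912-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13",
                     "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}]


def summarize_vifs(network_info):
    """Yield (devname, mac, fixed_ips, floating_ips) for each cached VIF."""
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        yield vif.get("devname"), vif.get("address"), fixed, floating


if __name__ == "__main__":
    for devname, mac, fixed, floating in summarize_vifs(SAMPLE_NETWORK_INFO):
        print(f"{devname} {mac} fixed={fixed} floating={floating}")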
[ 1121.388207] env[68437]: DEBUG nova.scheduler.client.report [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.455712] env[68437]: DEBUG nova.network.neutron [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.547305] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.548046] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance network_info: |[{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1121.548219] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:8c:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f058ce1-be0f-4b97-be84-11302a668781', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.556409] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating folder: Project (0e28f7fd8c8d412f8c9e1624c55d6604). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1121.556757] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b03fc72-b75b-4f0f-b587-8095295576d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.567688] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created folder: Project (0e28f7fd8c8d412f8c9e1624c55d6604) in parent group-v590848. [ 1121.567922] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating folder: Instances. Parent ref: group-v591113. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1121.568215] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84a466e9-891d-4a92-9579-6f4daf0ca60e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.577394] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created folder: Instances in parent group-v591113. [ 1121.577684] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.577916] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1121.578164] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-934c942c-cee5-4f7b-875b-8e7ef793fdfb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.598137] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.598137] env[68437]: value = "task-2944865" [ 1121.598137] env[68437]: _type = "Task" [ 1121.598137] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.607415] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944865, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.796190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-419c890a-4bae-47c2-a3e8-8c468982e5fe tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.468s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.821188] env[68437]: DEBUG nova.compute.manager [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-changed-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1121.821434] env[68437]: DEBUG nova.compute.manager [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing instance network info cache due to event network-changed-5f058ce1-be0f-4b97-be84-11302a668781. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1121.821792] env[68437]: DEBUG oslo_concurrency.lockutils [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.821928] env[68437]: DEBUG oslo_concurrency.lockutils [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1121.822084] env[68437]: DEBUG nova.network.neutron [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1121.892373] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.892872] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1121.958495] env[68437]: INFO nova.compute.manager [-] [instance: 2b985ca2-f0d1-4937-aa33-154aa53b0a40] Took 1.25 seconds to deallocate network for instance. [ 1122.112577] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944865, 'name': CreateVM_Task, 'duration_secs': 0.320411} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.112797] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1122.113567] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.113767] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.114214] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1122.114457] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d54a413-e8f8-42f1-91c4-d6e5606b989f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.119193] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1122.119193] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d442-7ed2-96d5-ad13-d6f1848713ce" [ 1122.119193] env[68437]: _type = "Task" [ 1122.119193] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.127070] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d442-7ed2-96d5-ad13-d6f1848713ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.400379] env[68437]: DEBUG nova.compute.utils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1122.401751] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1122.401922] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1122.475854] env[68437]: DEBUG nova.policy [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1122.527862] env[68437]: DEBUG nova.network.neutron [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updated VIF entry in instance network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1122.528236] env[68437]: DEBUG nova.network.neutron [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.629796] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d442-7ed2-96d5-ad13-d6f1848713ce, 'name': SearchDatastore_Task, 'duration_secs': 0.0093} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.630117] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1122.630360] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1122.630600] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.630747] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.630924] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1122.631212] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70f2171e-3537-445a-9c48-90adffbdd293 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.639141] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1122.639438] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1122.640204] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e9736e2-d0c4-416b-ac25-bc409f1f841f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.645368] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1122.645368] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c13e85-366b-6fe4-d1f8-094861682357" [ 1122.645368] env[68437]: _type = "Task" [ 1122.645368] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.652581] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c13e85-366b-6fe4-d1f8-094861682357, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.907776] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1122.979372] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Successfully created port: a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1122.985217] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fb905c8e-edf0-42e6-8f28-e7c0e7e108f3 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "2b985ca2-f0d1-4937-aa33-154aa53b0a40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.830s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.031163] env[68437]: DEBUG oslo_concurrency.lockutils [req-deb21ec1-9d89-40bd-bacf-7ccb490a9856 req-5b85708b-cf60-4ccf-a526-a9f93add45a2 service nova] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.156208] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c13e85-366b-6fe4-d1f8-094861682357, 'name': SearchDatastore_Task, 'duration_secs': 0.022012} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.157332] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-773b3587-ac0a-4f79-abde-514fa0e40493 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.163238] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1123.163238] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527f43c9-6975-66f3-4b39-51a1ff95c6bb" [ 1123.163238] env[68437]: _type = "Task" [ 1123.163238] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.171891] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527f43c9-6975-66f3-4b39-51a1ff95c6bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.675263] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527f43c9-6975-66f3-4b39-51a1ff95c6bb, 'name': SearchDatastore_Task, 'duration_secs': 0.011508} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.675544] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.675765] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1123.676028] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44349820-7532-425e-b64f-4edf61ca580f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.683020] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1123.683020] env[68437]: value = "task-2944866" [ 1123.683020] env[68437]: _type = "Task" [ 1123.683020] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.690536] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.913376] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1123.951575] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1123.951911] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.952122] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1123.952415] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.952661] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1123.952823] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1123.954143] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b 
tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1123.954420] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1123.954664] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1123.954946] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1123.955178] env[68437]: DEBUG nova.virt.hardware [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1123.956240] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922abc14-8081-4b1b-b495-f8031cf91b66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.966369] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedd6333-7365-4ef6-99db-c260e4ab15e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.194580] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944866, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.380107] env[68437]: DEBUG nova.compute.manager [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1124.380320] env[68437]: DEBUG nova.compute.manager [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing instance network info cache due to event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1124.380529] env[68437]: DEBUG oslo_concurrency.lockutils [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] Acquiring lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.380672] env[68437]: DEBUG oslo_concurrency.lockutils [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] Acquired lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.380833] env[68437]: DEBUG nova.network.neutron [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1124.697175] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.681472} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.697529] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1124.697801] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1124.698233] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23bfe1c2-04e0-4b43-926b-659adf7c1f7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.707494] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1124.707494] env[68437]: value = "task-2944868" [ 1124.707494] env[68437]: _type = "Task" [ 1124.707494] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.716771] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944868, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.982837] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "4fb5a384-0792-40df-b361-0784397a897f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.983084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.107463] env[68437]: DEBUG nova.network.neutron [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updated VIF entry in instance network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1125.107974] env[68437]: DEBUG nova.network.neutron [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [{"id": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "address": "fa:16:3e:cb:16:0c", "network": {"id": "a62e1a24-0149-4f49-9572-b9b3f0a73a40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-912812525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1971ad9ba5bd4d50a68495c8b2ab7341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94228d13-e4", "ovs_interfaceid": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.139096] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Successfully updated port: a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1125.219391] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 
tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078566} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.219795] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1125.220938] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb958086-3d98-49cc-99dc-4c2afd2e2970 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.224522] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.224773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.246969] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1125.247942] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac704186-25f3-42ab-be4c-5254174be433 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.267966] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1125.267966] env[68437]: value = "task-2944869" [ 1125.267966] env[68437]: _type = "Task" [ 1125.267966] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.275924] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944869, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.486665] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1125.610558] env[68437]: DEBUG oslo_concurrency.lockutils [req-282bb6c2-9420-4296-8bed-31d7f7cf4dc7 req-2ab293a5-c215-47ca-9ad8-071c0ad382a3 service nova] Releasing lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.642540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.642672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.642821] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1125.729048] env[68437]: DEBUG nova.compute.utils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1125.777947] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944869, 'name': ReconfigVM_Task, 'duration_secs': 0.362309} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.778285] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfigured VM instance instance-00000062 to attach disk [datastore2] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1125.779126] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a77829f-895b-46f3-a584-50715274b8d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.787269] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1125.787269] env[68437]: value = "task-2944870" [ 1125.787269] env[68437]: _type = "Task" [ 1125.787269] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.795746] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944870, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.016133] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.016434] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.017943] env[68437]: INFO nova.compute.claims [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.192424] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1126.236023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.301053] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944870, 'name': Rename_Task, 'duration_secs': 0.153108} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.301053] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1126.301053] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe88aa93-f603-4c3e-8c90-c3118182326e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.307771] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1126.307771] env[68437]: value = "task-2944872" [ 1126.307771] env[68437]: _type = "Task" [ 1126.307771] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.315251] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944872, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.432497] env[68437]: DEBUG nova.network.neutron [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Updating instance_info_cache with network_info: [{"id": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "address": "fa:16:3e:9e:37:a9", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b8a7ab-dd", "ovs_interfaceid": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.626065] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Received event network-vif-plugged-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1126.626266] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Acquiring lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.626542] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.626709] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.626877] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] No waiting events found dispatching network-vif-plugged-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1126.627053] env[68437]: WARNING nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Received unexpected event network-vif-plugged-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b for instance with vm_state building and task_state spawning. [ 1126.627221] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Received event network-changed-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1126.627374] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Refreshing instance network info cache due to event network-changed-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1126.627534] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Acquiring lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.818923] env[68437]: DEBUG oslo_vmware.api [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944872, 'name': PowerOnVM_Task, 'duration_secs': 0.429023} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.820151] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1126.820359] env[68437]: INFO nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Took 7.41 seconds to spawn the instance on the hypervisor. 
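The spawn of aff861ed-e792-480a-811e-c157c0606d08 that completes here is traced as a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each followed by a "Waiting for the task ... to complete" entry and periodic "progress is N%" polls. A simplified sketch of that poll-until-done pattern, using a hypothetical get_task_info callable rather than the real oslo.vmware session API, which also handles retries and session re-authentication:

    import time

    # Simplified sketch of the wait_for_task pattern seen in the entries above.
    # get_task_info is a hypothetical stand-in for the SOAP property-collector
    # round-trip; the real loop lives in oslo_vmware.api.
    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        while True:
            info = get_task_info(task_ref)       # one RetrievePropertiesEx round-trip
            if info["state"] == "success":
                return info.get("result")        # e.g. the new VM managed object ref
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # 'queued' or 'running': report progress and poll again
            print(f"Task {task_ref}: progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)

    # The spawn traced above is essentially a sequence of such waits:
    #   CreateVM_Task -> CopyVirtualDisk_Task -> ExtendVirtualDisk_Task
    #   -> ReconfigVM_Task (attach the copied .vmdk) -> Rename_Task -> PowerOnVM_Task
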
[ 1126.820542] env[68437]: DEBUG nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1126.823520] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d992d1-fb15-48c1-b9ba-04b1de0d8434 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.936942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.937291] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Instance network_info: |[{"id": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "address": "fa:16:3e:9e:37:a9", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b8a7ab-dd", "ovs_interfaceid": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1126.937588] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Acquired lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.937770] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Refreshing network info cache for port a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1126.939073] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:37:a9', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1126.946846] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating folder: Project (4b7dfebb79e54e4fba7e0b142f99d7eb). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1126.947870] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8620167-bead-46f1-a5a5-157dd7cb88fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.959735] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created folder: Project (4b7dfebb79e54e4fba7e0b142f99d7eb) in parent group-v590848. [ 1126.959925] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating folder: Instances. Parent ref: group-v591116. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1126.960169] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82ddf955-1152-4fa4-b721-a460501e8ea3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.968891] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created folder: Instances in parent group-v591116. [ 1126.969123] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.969323] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1126.969513] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a34b2cd1-8a9d-4124-af48-57e49d4cddbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.987992] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1126.987992] env[68437]: value = "task-2944875" [ 1126.987992] env[68437]: _type = "Task" [ 1126.987992] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.998034] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944875, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.189059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5038dd61-80e9-416b-aefa-45d30767e4a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.198287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38fb9e7-8b6e-405d-a22c-b9be8a706349 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.229641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72593783-d99a-4ec5-abcd-345eecafeb75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.237286] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe53914c-27d4-4fe4-b9da-4e3274fc9e57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.253065] env[68437]: DEBUG nova.compute.provider_tree [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.292037] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.292037] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.292037] env[68437]: INFO nova.compute.manager [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Attaching volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 to /dev/sdb [ 1127.324152] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6d45e8-f07b-4658-b49b-45c3004e9df0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.328831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1127.329208] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.329390] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.329624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.329858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.332464] env[68437]: INFO nova.compute.manager [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Terminating instance [ 1127.344741] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff781235-5182-4a9a-93a1-f71c17f8c24b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.349069] env[68437]: INFO nova.compute.manager [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Took 16.95 seconds to build instance. [ 1127.363917] env[68437]: DEBUG nova.virt.block_device [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating existing volume attachment record: 05d0ca95-d0b3-4464-87cd-ffb75987cd21 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1127.497641] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944875, 'name': CreateVM_Task, 'duration_secs': 0.339019} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.497706] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1127.498401] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.498568] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.498884] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1127.499151] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fca622c4-e692-40fb-90b3-72153befcffa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.503313] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1127.503313] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5269b0a7-76b6-5a99-7783-c93d47b35df3" [ 1127.503313] env[68437]: _type = "Task" [ 1127.503313] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.511125] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5269b0a7-76b6-5a99-7783-c93d47b35df3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.650048] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Updated VIF entry in instance network info cache for port a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1127.650424] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Updating instance_info_cache with network_info: [{"id": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "address": "fa:16:3e:9e:37:a9", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8b8a7ab-dd", "ovs_interfaceid": "a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.756544] env[68437]: DEBUG nova.scheduler.client.report [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.840780] env[68437]: DEBUG nova.compute.manager [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-changed-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1127.841448] env[68437]: DEBUG nova.compute.manager [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing instance network info cache due to event network-changed-5f058ce1-be0f-4b97-be84-11302a668781. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1127.841448] env[68437]: DEBUG oslo_concurrency.lockutils [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.841448] env[68437]: DEBUG oslo_concurrency.lockutils [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.841738] env[68437]: DEBUG nova.network.neutron [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1127.854018] env[68437]: DEBUG nova.compute.manager [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1127.854018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1127.854018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-fad87439-824b-42af-a6bd-5dbfab61e5af tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.462s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.854018] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c53edc-a49f-4bd0-bd98-58cc7e6681bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.861845] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.861845] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78d7f809-8b74-4f6e-af92-bad66936b98a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.868169] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] 
Waiting for the task: (returnval){ [ 1127.868169] env[68437]: value = "task-2944878" [ 1127.868169] env[68437]: _type = "Task" [ 1127.868169] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.875839] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.015959] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5269b0a7-76b6-5a99-7783-c93d47b35df3, 'name': SearchDatastore_Task, 'duration_secs': 0.009794} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.016371] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.016649] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1128.016894] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.017057] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.017252] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1128.017544] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-551355f8-3ecc-482c-abaa-2eb9f2af82c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.026055] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1128.026306] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1128.027074] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed6e67ce-a0bb-419d-ad69-3af90f72f24e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.033844] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1128.033844] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5292a8c7-c009-d39e-4de6-c6ed666ae68a" [ 1128.033844] env[68437]: _type = "Task" [ 1128.033844] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.042635] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5292a8c7-c009-d39e-4de6-c6ed666ae68a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.153322] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Releasing lock "refresh_cache-e81e633d-34a6-443d-a2fe-95e6d8afa552" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.153612] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1128.153818] env[68437]: DEBUG nova.compute.manager [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing instance network info cache due to event network-changed-94228d13-e4c8-47f2-9bfa-37e85949fe81. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1128.154061] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Acquiring lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.154217] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Acquired lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.154377] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Refreshing network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1128.261101] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.261743] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1128.379413] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944878, 'name': PowerOffVM_Task, 'duration_secs': 0.243457} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.379691] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.379857] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1128.380113] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad3a84d5-9d37-4de8-9bbc-1603f8b66d2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.464031] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1128.465205] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1128.465205] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Deleting the datastore file [datastore1] cdb5b8d0-03ab-4020-a9aa-00688f7aef8e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.465205] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5264752-0d9d-417e-a77e-de3596c5abe7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.475811] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for the task: (returnval){ [ 1128.475811] env[68437]: value = "task-2944882" [ 1128.475811] env[68437]: _type = "Task" [ 1128.475811] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.483741] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.544429] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5292a8c7-c009-d39e-4de6-c6ed666ae68a, 'name': SearchDatastore_Task, 'duration_secs': 0.008192} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.547602] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c95f8a46-d24b-4294-8a41-31c502ea7b79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.553390] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1128.553390] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52df4819-d5f5-4de0-4ec9-c23d13376e95" [ 1128.553390] env[68437]: _type = "Task" [ 1128.553390] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.562080] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52df4819-d5f5-4de0-4ec9-c23d13376e95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.581789] env[68437]: DEBUG nova.network.neutron [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updated VIF entry in instance network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1128.582198] env[68437]: DEBUG nova.network.neutron [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.767241] env[68437]: DEBUG nova.compute.utils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1128.768589] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1128.768754] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1128.803954] env[68437]: DEBUG nova.policy [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.837926] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updated VIF entry in instance network info cache for port 94228d13-e4c8-47f2-9bfa-37e85949fe81. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1128.838320] env[68437]: DEBUG nova.network.neutron [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [{"id": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "address": "fa:16:3e:cb:16:0c", "network": {"id": "a62e1a24-0149-4f49-9572-b9b3f0a73a40", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-912812525-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1971ad9ba5bd4d50a68495c8b2ab7341", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94228d13-e4", "ovs_interfaceid": "94228d13-e4c8-47f2-9bfa-37e85949fe81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.986398] env[68437]: DEBUG oslo_vmware.api [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Task: {'id': task-2944882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.262742} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.986677] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1128.986862] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1128.987051] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1128.987233] env[68437]: INFO nova.compute.manager [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1128.987473] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.987664] env[68437]: DEBUG nova.compute.manager [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1128.987756] env[68437]: DEBUG nova.network.neutron [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1129.049951] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Successfully created port: b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1129.064084] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52df4819-d5f5-4de0-4ec9-c23d13376e95, 'name': SearchDatastore_Task, 'duration_secs': 0.011271} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.064582] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.064946] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] e81e633d-34a6-443d-a2fe-95e6d8afa552/e81e633d-34a6-443d-a2fe-95e6d8afa552.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1129.065330] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dacb1e5a-8b9a-45d3-8e81-64d8f28ea058 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.072513] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1129.072513] env[68437]: value = "task-2944883" [ 1129.072513] env[68437]: _type = "Task" [ 1129.072513] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.081459] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944883, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.085046] env[68437]: DEBUG oslo_concurrency.lockutils [req-4319769e-1262-4f50-b010-77ae82cf3423 req-830ebd71-e4fe-42f9-8cb9-b34059fbf355 service nova] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.256857] env[68437]: DEBUG nova.compute.manager [req-ae7b985c-16e9-43f8-a639-2a975e8c5a8a req-56bb6ee5-cf3f-4cd2-839d-db1aac6374fd service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Received event network-vif-deleted-94228d13-e4c8-47f2-9bfa-37e85949fe81 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1129.257096] env[68437]: INFO nova.compute.manager [req-ae7b985c-16e9-43f8-a639-2a975e8c5a8a req-56bb6ee5-cf3f-4cd2-839d-db1aac6374fd service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Neutron deleted interface 94228d13-e4c8-47f2-9bfa-37e85949fe81; detaching it from the instance and deleting it from the info cache [ 1129.257284] env[68437]: DEBUG nova.network.neutron [req-ae7b985c-16e9-43f8-a639-2a975e8c5a8a req-56bb6ee5-cf3f-4cd2-839d-db1aac6374fd service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.271618] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1129.340694] env[68437]: DEBUG oslo_concurrency.lockutils [req-1e397fc6-bcbf-42de-9f8c-947a4783a92b req-292c6475-f5b3-4190-8e85-8bbcb9dc68b2 service nova] Releasing lock "refresh_cache-cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.582491] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944883, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47034} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.582765] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] e81e633d-34a6-443d-a2fe-95e6d8afa552/e81e633d-34a6-443d-a2fe-95e6d8afa552.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1129.583151] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1129.583474] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5172422e-8fb7-4a77-9978-79278a5f546f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.590064] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1129.590064] env[68437]: value = "task-2944884" [ 1129.590064] env[68437]: _type = "Task" [ 1129.590064] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.598093] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944884, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.738718] env[68437]: DEBUG nova.network.neutron [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.759559] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2a0bd26-080b-4d7b-93af-78975ee4caf5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.769514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14559440-bd07-47a5-ad54-8f598c80ad7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.803238] env[68437]: DEBUG nova.compute.manager [req-ae7b985c-16e9-43f8-a639-2a975e8c5a8a req-56bb6ee5-cf3f-4cd2-839d-db1aac6374fd service nova] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Detach interface failed, port_id=94228d13-e4c8-47f2-9bfa-37e85949fe81, reason: Instance cdb5b8d0-03ab-4020-a9aa-00688f7aef8e could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1130.100139] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944884, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064027} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.100417] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1130.101235] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d10724e-9cb8-4334-857d-9bbbb94a74bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.123687] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] e81e633d-34a6-443d-a2fe-95e6d8afa552/e81e633d-34a6-443d-a2fe-95e6d8afa552.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.123933] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f57f1035-80df-4273-a7f9-991888c8ee73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.143092] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1130.143092] env[68437]: value = "task-2944886" [ 1130.143092] env[68437]: _type = "Task" [ 1130.143092] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.152569] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944886, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.241374] env[68437]: INFO nova.compute.manager [-] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Took 1.25 seconds to deallocate network for instance. [ 1130.284635] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1130.312873] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.313182] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.313400] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.313587] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.313748] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.313897] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.314210] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.314417] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.314606] env[68437]: DEBUG nova.virt.hardware [None 
req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.314770] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.314957] env[68437]: DEBUG nova.virt.hardware [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.315841] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7961028-e504-4a39-9584-8d9fc2d45464 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.324411] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9682e3ef-e0b9-441d-b59a-f36397f506fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.436016] env[68437]: DEBUG nova.compute.manager [req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Received event network-vif-plugged-b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1130.436260] env[68437]: DEBUG oslo_concurrency.lockutils [req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] Acquiring lock "4fb5a384-0792-40df-b361-0784397a897f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.436470] env[68437]: DEBUG oslo_concurrency.lockutils [req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] Lock "4fb5a384-0792-40df-b361-0784397a897f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.436720] env[68437]: DEBUG oslo_concurrency.lockutils [req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] Lock "4fb5a384-0792-40df-b361-0784397a897f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.436843] env[68437]: DEBUG nova.compute.manager [req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] No waiting events found dispatching network-vif-plugged-b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1130.436982] env[68437]: WARNING nova.compute.manager 
[req-378bba09-40dd-4b74-99aa-6ab4687cdb36 req-d0176077-fef8-41f6-9e12-dd0b814efbac service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Received unexpected event network-vif-plugged-b35125a2-2df8-4a1d-8e2a-7330ff771b84 for instance with vm_state building and task_state spawning. [ 1130.521836] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Successfully updated port: b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.654341] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944886, 'name': ReconfigVM_Task, 'duration_secs': 0.282373} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.654635] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Reconfigured VM instance instance-00000063 to attach disk [datastore2] e81e633d-34a6-443d-a2fe-95e6d8afa552/e81e633d-34a6-443d-a2fe-95e6d8afa552.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.655288] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-557758fb-6972-4656-bdbd-f6a6638bb8f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.663247] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1130.663247] env[68437]: value = "task-2944887" [ 1130.663247] env[68437]: _type = "Task" [ 1130.663247] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.673978] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944887, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.747948] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.748275] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.748508] env[68437]: DEBUG nova.objects.instance [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lazy-loading 'resources' on Instance uuid cdb5b8d0-03ab-4020-a9aa-00688f7aef8e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.022918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.023102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.023289] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1131.174540] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944887, 'name': Rename_Task, 'duration_secs': 0.145079} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.174846] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.175097] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89988e89-cc04-4208-9950-9bf8414e8515 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.183608] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1131.183608] env[68437]: value = "task-2944888" [ 1131.183608] env[68437]: _type = "Task" [ 1131.183608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.191278] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.409218] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b387758d-8c15-4fa4-bcdb-86ba2ccb0f04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.418295] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420c1630-53d3-4b79-a1a3-19bb58d6be79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.451702] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112042d3-7526-4cc9-a0cc-660b3cbacbda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.460496] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6396ea3-9a8c-4e58-b571-24e1bd6b4d7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.475277] env[68437]: DEBUG nova.compute.provider_tree [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.556714] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1131.694518] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944888, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.696580] env[68437]: DEBUG nova.network.neutron [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Updating instance_info_cache with network_info: [{"id": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "address": "fa:16:3e:24:bf:88", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35125a2-2d", "ovs_interfaceid": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.978080] env[68437]: DEBUG nova.scheduler.client.report [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.021992] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.022248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.194518] env[68437]: DEBUG oslo_vmware.api [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944888, 'name': PowerOnVM_Task, 'duration_secs': 0.625524} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.194850] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.194965] env[68437]: INFO nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Took 8.28 seconds to spawn the instance on the hypervisor. [ 1132.195105] env[68437]: DEBUG nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.195898] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a27582-6e97-4738-9227-2e9905a2c327 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.198449] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.198729] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Instance network_info: |[{"id": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "address": "fa:16:3e:24:bf:88", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35125a2-2d", "ovs_interfaceid": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1132.199090] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:bf:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b35125a2-2df8-4a1d-8e2a-7330ff771b84', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.206632] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1132.207183] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1132.207407] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ffb1ce3-66f5-4ae0-9074-f044abbad50a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.230551] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.230551] env[68437]: value = "task-2944889" [ 1132.230551] env[68437]: _type = "Task" [ 1132.230551] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.237978] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944889, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.409634] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1132.410012] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1132.410953] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec5da6d-dd5a-436f-8f71-b24a7ce69e24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.435660] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882d3364-6aa9-421b-b5e7-65b9e0ae072e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.462883] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1132.464298] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ffd6399-cdda-4145-a2cc-70fdea9b718b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.479392] env[68437]: DEBUG nova.compute.manager [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Received event network-changed-b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1132.479582] env[68437]: DEBUG nova.compute.manager [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Refreshing instance network info cache due to event network-changed-b35125a2-2df8-4a1d-8e2a-7330ff771b84. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1132.479795] env[68437]: DEBUG oslo_concurrency.lockutils [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] Acquiring lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.479927] env[68437]: DEBUG oslo_concurrency.lockutils [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] Acquired lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.480096] env[68437]: DEBUG nova.network.neutron [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Refreshing network info cache for port b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1132.482607] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.734s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.488639] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1132.488639] env[68437]: value = "task-2944890" [ 1132.488639] env[68437]: _type = "Task" [ 1132.488639] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.500113] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944890, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.507378] env[68437]: INFO nova.scheduler.client.report [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Deleted allocations for instance cdb5b8d0-03ab-4020-a9aa-00688f7aef8e [ 1132.524096] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1132.736215] env[68437]: INFO nova.compute.manager [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Took 16.53 seconds to build instance. [ 1132.744411] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944889, 'name': CreateVM_Task, 'duration_secs': 0.319985} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.744636] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1132.745509] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.745750] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.746136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1132.746433] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-319479ae-ac3b-45fc-9721-fc55658da047 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.752613] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1132.752613] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]524adcbb-0485-cf72-6d30-79598b3fe693" [ 1132.752613] env[68437]: _type = "Task" [ 1132.752613] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.760886] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524adcbb-0485-cf72-6d30-79598b3fe693, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.000206] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944890, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.018704] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c1722ad9-a469-45e1-bff2-27b1c8acf226 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148 tempest-FloatingIPsAssociationNegativeTestJSON-1607824148-project-member] Lock "cdb5b8d0-03ab-4020-a9aa-00688f7aef8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.689s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.050697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.051071] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.053439] env[68437]: INFO nova.compute.claims [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1133.197294] env[68437]: DEBUG nova.network.neutron [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Updated VIF entry in instance network info cache for port b35125a2-2df8-4a1d-8e2a-7330ff771b84. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1133.197697] env[68437]: DEBUG nova.network.neutron [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Updating instance_info_cache with network_info: [{"id": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "address": "fa:16:3e:24:bf:88", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb35125a2-2d", "ovs_interfaceid": "b35125a2-2df8-4a1d-8e2a-7330ff771b84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.239036] env[68437]: DEBUG oslo_concurrency.lockutils [None req-708d4d5a-53e7-4022-8dcb-b9afea0b8a9b tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.037s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.264545] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]524adcbb-0485-cf72-6d30-79598b3fe693, 'name': SearchDatastore_Task, 'duration_secs': 0.01534} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.264869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.265115] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.265367] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.265520] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.265715] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.265995] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a21a4931-ce39-480c-bd7b-865bbde05f16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.277721] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.277905] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1133.278989] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87125be5-38b0-43b7-9538-22de89fe0cac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.286012] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1133.286012] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bc7543-fa37-287b-18a7-0ac1139a76f5" [ 1133.286012] env[68437]: _type = "Task" [ 1133.286012] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.296046] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bc7543-fa37-287b-18a7-0ac1139a76f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.503037] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944890, 'name': ReconfigVM_Task, 'duration_secs': 0.538813} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.503395] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.508182] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44a2ffcb-5a24-43c3-bfa5-a8a99f644226 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.525421] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1133.525421] env[68437]: value = "task-2944891" [ 1133.525421] env[68437]: _type = "Task" [ 1133.525421] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.534466] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944891, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.701181] env[68437]: DEBUG oslo_concurrency.lockutils [req-ccd814bd-dc27-4d96-a085-31378b3097c3 req-eaba5c35-3722-4537-9d56-9d42092e10ae service nova] Releasing lock "refresh_cache-4fb5a384-0792-40df-b361-0784397a897f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.802019] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52bc7543-fa37-287b-18a7-0ac1139a76f5, 'name': SearchDatastore_Task, 'duration_secs': 0.016339} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.803370] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2937f64-561e-4239-b708-74f0a5a33460 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.810690] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1133.810690] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d43989-b530-866d-8100-dd5ff9066755" [ 1133.810690] env[68437]: _type = "Task" [ 1133.810690] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.820354] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d43989-b530-866d-8100-dd5ff9066755, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.040072] env[68437]: DEBUG oslo_vmware.api [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944891, 'name': ReconfigVM_Task, 'duration_secs': 0.168114} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.040072] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1134.256404] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d09eb3d-91bf-42ed-a455-46a9324bd6f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.264762] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be7194e-1431-4407-b075-8374e97d9915 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.297757] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df99b265-23da-4c0f-8549-69d994de6828 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.306010] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1ffca7-c9e8-403e-a912-d7a845ebf880 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.323575] env[68437]: DEBUG nova.compute.provider_tree [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.331146] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d43989-b530-866d-8100-dd5ff9066755, 'name': SearchDatastore_Task, 'duration_secs': 0.012785} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.331477] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.332392] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4fb5a384-0792-40df-b361-0784397a897f/4fb5a384-0792-40df-b361-0784397a897f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.332392] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-169aafa0-1d0e-4241-942d-b93c7f83944c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.341971] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1134.341971] env[68437]: value = "task-2944892" [ 1134.341971] env[68437]: _type = "Task" [ 1134.341971] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.353209] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944892, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.459194] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.459194] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.829282] env[68437]: DEBUG nova.scheduler.client.report [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.853764] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944892, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.962156] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.102471] env[68437]: DEBUG nova.objects.instance [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'flavor' on Instance uuid 76d97a56-21a2-4363-a987-ef872f056510 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.222202] env[68437]: INFO nova.compute.manager [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Rebuilding instance [ 1135.274884] env[68437]: DEBUG nova.compute.manager [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1135.275924] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932f136b-e5cd-460e-ab66-cb533878aebd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.334262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.334736] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1135.353860] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944892, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569016} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.354391] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4fb5a384-0792-40df-b361-0784397a897f/4fb5a384-0792-40df-b361-0784397a897f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1135.354745] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1135.355223] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63160b7f-51cb-476d-b95b-780b6afe5f76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.363032] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1135.363032] env[68437]: value = "task-2944893" [ 1135.363032] env[68437]: _type = "Task" [ 1135.363032] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.373494] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944893, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.484851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.484851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.486341] env[68437]: INFO nova.compute.claims [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.607849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a833bded-0b05-49ce-8373-8ddf2bc1342f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.316s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.839369] env[68437]: DEBUG nova.compute.utils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.840752] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.841030] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1135.873295] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.159601} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.873585] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1135.874377] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f945e3fd-3654-4734-a90d-2aa586ac21af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.898126] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 4fb5a384-0792-40df-b361-0784397a897f/4fb5a384-0792-40df-b361-0784397a897f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.899656] env[68437]: DEBUG nova.policy [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cba27fb789842868ad9987dafa17900', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '508b51afac37437f82818180bd9c1c3b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1135.901694] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebb0a41f-a5d4-4ad2-be77-06020e5b0487 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.922887] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1135.922887] env[68437]: value = "task-2944894" [ 1135.922887] env[68437]: _type = "Task" [ 1135.922887] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.932409] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944894, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.996037] env[68437]: INFO nova.compute.manager [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Rebuilding instance [ 1136.043979] env[68437]: DEBUG nova.compute.manager [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.044896] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2c67f3-bcab-40d0-a3f3-23a88bedb9f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.213057] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Successfully created port: 9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1136.292768] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.293349] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7709d259-4995-46ee-bc6b-0e72271f438f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.300907] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1136.300907] env[68437]: value = "task-2944895" [ 1136.300907] env[68437]: _type = "Task" [ 1136.300907] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.309302] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944895, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.344506] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1136.443670] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944894, 'name': ReconfigVM_Task, 'duration_secs': 0.297316} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.443670] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 4fb5a384-0792-40df-b361-0784397a897f/4fb5a384-0792-40df-b361-0784397a897f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.443670] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bf74baf-c7b0-4bca-82d9-58d2f62650f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.452015] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1136.452015] env[68437]: value = "task-2944896" [ 1136.452015] env[68437]: _type = "Task" [ 1136.452015] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.473710] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944896, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.697796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d5e16e-3150-4be8-b389-1564ce5430cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.706392] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0703f217-dc6c-4e3c-9c6d-eae9f7c992ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.738270] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfaf40d-96a6-4fe9-b602-079c4e7e835d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.746088] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730bec5d-093b-4826-bbd6-bb42bcdfe07b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.759754] env[68437]: DEBUG nova.compute.provider_tree [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.811549] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944895, 'name': PowerOffVM_Task, 'duration_secs': 0.355411} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.811775] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1136.812019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.812871] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b030c6e-d39a-438f-bf63-239023cee548 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.819498] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1136.819729] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f306e2a-84df-450e-b3bd-6a5c15dcc315 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.836024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "b4257b79-2723-43fd-b64f-74104802e048" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.836389] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.851070] env[68437]: INFO nova.virt.block_device [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Booting with volume 32c3b984-3df2-4cf5-8349-d6fc830a6ed3 at /dev/sda [ 1136.890776] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1136.891017] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 
4abf1477-2f0e-4a13-884a-c19420b3e435] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1136.891218] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleting the datastore file [datastore2] 4abf1477-2f0e-4a13-884a-c19420b3e435 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.892623] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbbff5a0-26f1-4f8a-b3c7-c060e66461c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.895446] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b03d946a-05bf-41f2-94f5-b2edd56f10f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.908703] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74995dab-e4c9-4fad-8f68-0fe400e400ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.923474] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1136.923474] env[68437]: value = "task-2944898" [ 1136.923474] env[68437]: _type = "Task" [ 1136.923474] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.935528] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944898, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.957332] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-290dd90d-00eb-438a-8590-42e7b233a4e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.967813] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944896, 'name': Rename_Task, 'duration_secs': 0.163643} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.969617] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.970467] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25109aba-c2f5-4e3e-a1ae-1bcd8f8034f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.977350] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68329351-2da3-4d52-b2ac-b6c6ee9e3c63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.004731] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1137.004731] env[68437]: value = "task-2944899" [ 1137.004731] env[68437]: _type = "Task" [ 1137.004731] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.012348] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.023223] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79d0ce2-9907-4f81-a6ca-eaf432273d31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.029708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2441c62a-6c9d-44d4-a986-e48a1b9ab6a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.039387] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.039727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.044509] env[68437]: DEBUG nova.virt.block_device [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 
9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updating existing volume attachment record: 91c3ce9f-b734-4004-bd32-07b999fd3a5f {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1137.063965] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1137.064179] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f9abe65-953c-4b40-b9cd-2c0d30f9f51f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.072268] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1137.072268] env[68437]: value = "task-2944900" [ 1137.072268] env[68437]: _type = "Task" [ 1137.072268] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.081140] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.263613] env[68437]: DEBUG nova.scheduler.client.report [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.338653] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1137.433475] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297061} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.433687] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.433860] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.434050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.515179] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944899, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.547983] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1137.583048] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944900, 'name': PowerOffVM_Task, 'duration_secs': 0.401894} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.583382] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.640453] env[68437]: INFO nova.compute.manager [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Detaching volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 [ 1137.660344] env[68437]: DEBUG nova.compute.manager [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Received event network-vif-plugged-9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1137.660658] env[68437]: DEBUG oslo_concurrency.lockutils [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] Acquiring lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.660832] env[68437]: DEBUG oslo_concurrency.lockutils [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.661199] env[68437]: DEBUG oslo_concurrency.lockutils [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.661395] env[68437]: DEBUG nova.compute.manager [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] No waiting events found dispatching network-vif-plugged-9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1137.661574] env[68437]: WARNING nova.compute.manager [req-caca3eab-67e2-4d5c-a030-c870247efd66 req-e1b487a7-3d77-4adf-809d-8678622ad1be service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Received unexpected event network-vif-plugged-9c369f5b-7c68-4b57-8c1e-cc2b007af652 for instance with vm_state building and task_state block_device_mapping. 
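The paired "Acquiring lock … by …", "Lock … acquired … :: waited 0.000s" and "Lock … "released" … :: held …s" DEBUG lines that bracket the event handling above (and most other operations in this log) come from oslo.concurrency's named-lock helpers. The sketch below is a minimal, illustrative use of that pattern only; the `pending_events` map, the function name and the exact "<uuid>-events" lock-name format are assumptions for illustration, not Nova's actual implementation.

```python
from oslo_concurrency import lockutils

def pop_instance_event(pending_events, instance_uuid, event_name):
    """Pop a pending external event for an instance, or return None.

    oslo.concurrency logs the Acquiring/acquired/released DEBUG messages
    around this named critical section, mirroring the "<uuid>-events"
    locks taken in the log above.
    """
    with lockutils.lock(f"{instance_uuid}-events"):
        return pending_events.get(instance_uuid, {}).pop(event_name, None)
```

When no waiter has registered for the event, as with the instance above that is still in task_state block_device_mapping, the lookup comes back empty and the caller logs the "Received unexpected event network-vif-plugged-…" warning instead of dispatching it.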
[ 1137.676073] env[68437]: INFO nova.virt.block_device [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Attempting to driver detach volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 from mountpoint /dev/sdb [ 1137.676339] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1137.676524] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1137.677482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f8f6fa-02c2-44f7-aea3-637d73206356 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.701992] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b816aa1-4e5b-44d6-855b-2e90edaa8222 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.709042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de593a82-bde4-4de5-9778-51a6a1171b6a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.729636] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf257946-92dd-4766-b5e6-ab8e35e11c82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.744183] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] The volume has not been displaced from its original location: [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1137.749353] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1137.750049] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b9d7d66-a2f4-4726-bcb6-8199ddf94500 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.763133] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Successfully updated port: 9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1137.768947] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1137.768947] env[68437]: value = "task-2944901" [ 1137.768947] env[68437]: _type = "Task" [ 1137.768947] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.769714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.285s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.770225] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.781738] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944901, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.862927] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.863333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.864960] env[68437]: INFO nova.compute.claims [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1138.015085] env[68437]: DEBUG oslo_vmware.api [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944899, 'name': PowerOnVM_Task, 'duration_secs': 1.003677} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.015393] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.015657] env[68437]: INFO nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Took 7.73 seconds to spawn the instance on the hypervisor. 
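The PowerOnVM_Task sequence just above follows the same shape as every vCenter task in this log: the invoked *_Task call returns a task reference immediately, and the driver polls it ("progress is 0% … 66% … completed successfully") via oslo.vmware's wait_for_task/_poll_task. The stand-alone sketch below is a simplified, hypothetical equivalent of that polling loop; `poll_progress` is an assumed callback standing in for the real property-collector query of the task's state and progress.

```python
import time

def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll a long-running task until it succeeds, fails, or times out.

    poll_progress() is assumed to return (state, percent), where state is
    one of 'queued', 'running', 'success' or 'error' -- a stand-in for
    reading a vCenter task's info.state / info.progress properties.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, percent = poll_progress()
        print(f"progress is {percent}%")   # cf. the _poll_task DEBUG lines above
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)
    raise TimeoutError(f"task did not complete within {timeout:.0f}s")
```

With oslo.vmware itself, the equivalent is handled by calling wait_for_task() on the session with the task reference returned by the invoked method, as the api.py:397/434/444 lines in this log show.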
[ 1138.015846] env[68437]: DEBUG nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.016738] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0047edb-6271-4268-8330-7d26a83cba31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.067265] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.265814] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.265956] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquired lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.266134] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1138.280742] env[68437]: DEBUG nova.compute.utils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1138.282299] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944901, 'name': ReconfigVM_Task, 'duration_secs': 0.315318} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.282759] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1138.282928] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1138.285774] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1138.290592] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e644122d-36ba-4c47-a82d-14a42df2d6e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.307935] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1138.307935] env[68437]: value = "task-2944902" [ 1138.307935] env[68437]: _type = "Task" [ 1138.307935] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.318021] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944902, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.341677] env[68437]: DEBUG nova.policy [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.472847] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.473167] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.473363] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.473571] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.473743] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.473908] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.474168] env[68437]: DEBUG nova.virt.hardware [None 
req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.474357] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.474535] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.474727] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.474919] env[68437]: DEBUG nova.virt.hardware [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.475966] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875fb6c3-65f1-48ab-b5ba-5df285c79c02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.484544] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7abab29-8c47-4a84-8b76-045b4e89cf1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.499090] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:d4:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c58ce980-01f0-476a-b297-adac9a7fcdef', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.508064] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.508064] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.508064] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4b254b8-b89b-4175-ae0d-535a67b73438 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.528362] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.528362] env[68437]: value = "task-2944903" [ 1138.528362] env[68437]: _type = "Task" [ 1138.528362] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.541723] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944903, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.542309] env[68437]: INFO nova.compute.manager [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Took 12.55 seconds to build instance. [ 1138.633594] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Successfully created port: 7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.783790] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.798481] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1138.818644] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944902, 'name': ReconfigVM_Task, 'duration_secs': 0.239376} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.818968] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1138.945059] env[68437]: DEBUG nova.network.neutron [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updating instance_info_cache with network_info: [{"id": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "address": "fa:16:3e:d6:01:fa", "network": {"id": "1d1d8521-0bd8-413f-a787-780c6555502e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-525691127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "508b51afac37437f82818180bd9c1c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c369f5b-7c", "ovs_interfaceid": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.949067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "4fb5a384-0792-40df-b361-0784397a897f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.041028] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944903, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.044754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d3164f6b-f88a-40da-8a24-886e4b289bf6 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.062s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.047573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.099s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.047774] env[68437]: DEBUG nova.compute.manager [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.048829] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e30201-7da7-47b7-98a7-84b0bfb78a90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.055137] env[68437]: DEBUG nova.compute.manager [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1139.055741] env[68437]: DEBUG nova.objects.instance [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'flavor' on Instance uuid 4fb5a384-0792-40df-b361-0784397a897f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.070465] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4492ea-c410-4415-acad-27876bea5173 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.078500] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a42c0b8-5e0d-4523-81a1-88dcc8ea6799 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.108968] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbf9a72-6ff8-4ca8-b138-61eb46c80be6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.116150] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ac439b-c158-466c-b7be-80b958bec4f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1139.129895] env[68437]: DEBUG nova.compute.provider_tree [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.137673] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.138189] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.138389] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.138700] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.138700] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.138925] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.138976] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.139190] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.139349] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.139514] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.139672] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.139844] env[68437]: DEBUG nova.virt.hardware [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.141241] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68358105-1fc2-45a8-adfa-5f075018317b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.151186] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43c9150-e1e5-4078-a95f-c41bc5a8f516 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.448842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Releasing lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.448842] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance network_info: |[{"id": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "address": "fa:16:3e:d6:01:fa", "network": {"id": "1d1d8521-0bd8-413f-a787-780c6555502e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-525691127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "508b51afac37437f82818180bd9c1c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c369f5b-7c", "ovs_interfaceid": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1139.449247] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:01:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c369f5b-7c68-4b57-8c1e-cc2b007af652', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1139.456457] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Creating folder: Project (508b51afac37437f82818180bd9c1c3b). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1139.456714] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-207392c1-42ed-43e4-b3c0-c8fe4a45a277 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.470094] env[68437]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1139.470250] env[68437]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68437) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1139.470705] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Folder already exists: Project (508b51afac37437f82818180bd9c1c3b). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1139.470896] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Creating folder: Instances. Parent ref: group-v591105. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1139.471167] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d86d238f-96f1-4e9c-81f7-31f92364b250 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.482988] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Created folder: Instances in parent group-v591105. 
[ 1139.483285] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1139.483659] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1139.483659] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8519cf2f-b36f-46e9-9541-80741e59b331 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.503064] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1139.503064] env[68437]: value = "task-2944906" [ 1139.503064] env[68437]: _type = "Task" [ 1139.503064] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.512784] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944906, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.538487] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944903, 'name': CreateVM_Task, 'duration_secs': 0.695101} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.538696] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.539463] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.539642] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.540028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.540307] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-763aa1fe-bd69-49c0-a56a-2f0badc1f1a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.545017] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1139.545017] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3eee7-ec72-2137-7209-4287f689f062" [ 1139.545017] env[68437]: _type = "Task" [ 1139.545017] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.553148] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3eee7-ec72-2137-7209-4287f689f062, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.633014] env[68437]: DEBUG nova.scheduler.client.report [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.687069] env[68437]: DEBUG nova.compute.manager [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Received event network-changed-9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1139.687069] env[68437]: DEBUG nova.compute.manager [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Refreshing instance network info cache due to event network-changed-9c369f5b-7c68-4b57-8c1e-cc2b007af652. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1139.687374] env[68437]: DEBUG oslo_concurrency.lockutils [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] Acquiring lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.687404] env[68437]: DEBUG oslo_concurrency.lockutils [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] Acquired lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.687596] env[68437]: DEBUG nova.network.neutron [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Refreshing network info cache for port 9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1139.796020] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.822206] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.822470] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.822667] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.822801] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.822954] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 
tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.823134] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.823366] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.823530] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.823698] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.823860] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.824041] env[68437]: DEBUG nova.virt.hardware [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.825291] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a156ac-9830-4c12-8e47-367e2c416dad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.833027] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2839fcdb-8cd9-47b9-b045-6e85efca71b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.874266] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.874599] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77b52e7c-7cda-4a7a-abd7-ae7b3bbb7d66 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.881679] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 
tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1139.881679] env[68437]: value = "task-2944907" [ 1139.881679] env[68437]: _type = "Task" [ 1139.881679] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.889914] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944907, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.012550] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944906, 'name': CreateVM_Task, 'duration_secs': 0.303972} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.012776] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1140.013507] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '91c3ce9f-b734-4004-bd32-07b999fd3a5f', 'device_type': None, 'mount_device': '/dev/sda', 'boot_index': 0, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591112', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'name': 'volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98ff24-e9d1-4754-89d2-ee2daa54ad47', 'attached_at': '', 'detached_at': '', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'serial': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3'}, 'delete_on_termination': True, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68437) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1140.013710] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Root volume attach. 
Driver type: vmdk {{(pid=68437) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1140.014630] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba30e04-1e03-42dd-bef2-9dedece449c7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.021787] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4e51ca-0e69-4ee9-9038-e2520e370ce0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.027732] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2343d77c-4839-435b-b122-ecb0ea6017b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.033272] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-427e368f-f6e2-44fe-9ebb-64cae4880807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.040046] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1140.040046] env[68437]: value = "task-2944908" [ 1140.040046] env[68437]: _type = "Task" [ 1140.040046] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.047447] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.055012] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3eee7-ec72-2137-7209-4287f689f062, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.055294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.055537] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.055877] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.055936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.056084] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.056329] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b7ad868-a78f-4052-9241-211837f098bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.062588] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.062889] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34b7daba-13cf-44a4-aaaa-ca0278782697 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.065420] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.065579] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.066253] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-358c3d47-7edf-461d-a839-182deaeb43c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.069288] env[68437]: DEBUG oslo_vmware.api [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1140.069288] env[68437]: value = "task-2944909" [ 1140.069288] env[68437]: _type = "Task" [ 1140.069288] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.073531] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1140.073531] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52612071-9e99-3f85-c259-2d729fafd84b" [ 1140.073531] env[68437]: _type = "Task" [ 1140.073531] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.079934] env[68437]: DEBUG oslo_vmware.api [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944909, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.084536] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52612071-9e99-3f85-c259-2d729fafd84b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.102180] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Successfully updated port: 7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.138542] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.275s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.139104] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1140.141616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.075s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.142998] env[68437]: INFO nova.compute.claims [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1140.392037] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1140.392173] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1140.392400] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1140.393158] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3353907d-abf3-445c-8f5f-1017ec926f08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.413733] env[68437]: DEBUG nova.network.neutron [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updated VIF entry in instance network info cache for port 9c369f5b-7c68-4b57-8c1e-cc2b007af652. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1140.414027] env[68437]: DEBUG nova.network.neutron [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updating instance_info_cache with network_info: [{"id": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "address": "fa:16:3e:d6:01:fa", "network": {"id": "1d1d8521-0bd8-413f-a787-780c6555502e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-525691127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "508b51afac37437f82818180bd9c1c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c369f5b-7c", "ovs_interfaceid": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.415774] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163ec14b-ec45-4b55-a7ae-cb16eafd9e81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.423278] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1140.423653] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1140.424735] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94ff3c8-5ef6-4694-ab07-41d331f20c87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.433585] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1140.433784] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa209d04-d148-43eb-9763-d2a60c2514ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.502068] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None 
req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1140.502387] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1140.502452] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore1] 76d97a56-21a2-4363-a987-ef872f056510 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.502705] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdb4a21b-8831-41e1-9c65-0548ea94152f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.509790] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1140.509790] env[68437]: value = "task-2944911" [ 1140.509790] env[68437]: _type = "Task" [ 1140.509790] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.518576] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.550256] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 42%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.580424] env[68437]: DEBUG oslo_vmware.api [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944909, 'name': PowerOffVM_Task, 'duration_secs': 0.183389} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.581176] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.581413] env[68437]: DEBUG nova.compute.manager [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.582224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c07194-bab7-4ba2-b90b-d9e229a70d5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.588323] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52612071-9e99-3f85-c259-2d729fafd84b, 'name': SearchDatastore_Task, 'duration_secs': 0.015577} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.589481] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c3ee4f7-7469-4a04-92e8-db5afed4667c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.598599] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1140.598599] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5254c42e-8fb3-ecda-61dd-8b3a05cdd5d5" [ 1140.598599] env[68437]: _type = "Task" [ 1140.598599] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.606759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.606895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.607076] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1140.608274] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5254c42e-8fb3-ecda-61dd-8b3a05cdd5d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.647686] env[68437]: DEBUG nova.compute.utils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1140.651908] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1140.919448] env[68437]: DEBUG oslo_concurrency.lockutils [req-2dc35c79-2070-48b4-a8e1-84452a9c7985 req-32bb8011-4e50-4bbe-80fd-bcf1529dbd5c service nova] Releasing lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.020208] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213827} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.020467] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.020613] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.020792] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.051027] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 56%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.102391] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6e9f7cdd-bf0d-48d3-b8df-d4378453722b tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.109283] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5254c42e-8fb3-ecda-61dd-8b3a05cdd5d5, 'name': SearchDatastore_Task, 'duration_secs': 0.015742} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.111445] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.111717] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.112009] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed0fe01f-834d-4530-883a-c4b842359d62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.120703] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1141.120703] env[68437]: value = "task-2944912" [ 1141.120703] env[68437]: _type = "Task" [ 1141.120703] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.130382] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.144043] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1141.152430] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1141.302453] env[68437]: DEBUG nova.network.neutron [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Updating instance_info_cache with network_info: [{"id": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "address": "fa:16:3e:ff:aa:b8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e45a17c-73", "ovs_interfaceid": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.350113] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "4fb5a384-0792-40df-b361-0784397a897f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.350398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.350609] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "4fb5a384-0792-40df-b361-0784397a897f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.350792] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.350959] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.353447] env[68437]: INFO nova.compute.manager [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Terminating instance [ 1141.381226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c22f755-a35c-4645-8831-7f0374e3178f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.389790] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fd63e0-50c5-4e2c-892f-aa5c7c587631 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.421845] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5698f099-b0a7-4fb0-9d39-362928375c31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.429756] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30967cf-a261-4f2f-8cdc-1f94b289171d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.444098] env[68437]: DEBUG nova.compute.provider_tree [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1141.525989] env[68437]: INFO nova.virt.block_device [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Booting with volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 at /dev/sdb [ 1141.553823] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 69%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.570366] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a3f5c00-91aa-4dfd-a359-b0dc91dffaba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.581906] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90f55c8-49f9-4a15-be95-82daf10edbc1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.620611] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e02f39d0-e1be-4a46-abbc-b38348499d8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.632929] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944912, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.637416] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836efc01-44db-4d10-95f9-86bac691b990 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.676264] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138c03e5-6f88-40aa-97c4-3286ec0e3812 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.686680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91b1955-9006-41b6-bcbd-c456c4fe90e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.702477] env[68437]: DEBUG nova.virt.block_device [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating existing volume attachment record: 2ac203cb-56bf-464d-a014-e9ff04d2348e {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1141.728233] env[68437]: DEBUG nova.compute.manager [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Received event network-vif-plugged-7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1141.728535] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Acquiring lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.728664] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.728837] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.729046] env[68437]: DEBUG nova.compute.manager [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] No waiting events found dispatching network-vif-plugged-7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.729504] env[68437]: WARNING nova.compute.manager [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Received unexpected event network-vif-plugged-7e45a17c-73eb-46e6-be1e-b49fc753eb9b for instance with vm_state building and task_state spawning. [ 1141.729504] env[68437]: DEBUG nova.compute.manager [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Received event network-changed-7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1141.729504] env[68437]: DEBUG nova.compute.manager [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Refreshing instance network info cache due to event network-changed-7e45a17c-73eb-46e6-be1e-b49fc753eb9b. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1141.730272] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Acquiring lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.805355] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.805875] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Instance network_info: |[{"id": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "address": "fa:16:3e:ff:aa:b8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e45a17c-73", "ovs_interfaceid": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.806332] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Acquired lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.806536] env[68437]: DEBUG nova.network.neutron [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Refreshing network info cache for port 7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1141.807954] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:aa:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e45a17c-73eb-46e6-be1e-b49fc753eb9b', 'vif_model': 'vmxnet3'}] 
{{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.817383] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.817987] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.818789] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18cb765b-1f99-409d-990d-22f2cd198447 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.842420] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.842420] env[68437]: value = "task-2944913" [ 1141.842420] env[68437]: _type = "Task" [ 1141.842420] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.855969] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944913, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.860396] env[68437]: DEBUG nova.compute.manager [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1141.860646] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.862244] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ded1e0-8a8c-4c1a-a34e-327f6558dedc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.871323] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.871740] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c16b79c-882d-4986-9a53-25ca1a7f0ddb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.946986] env[68437]: DEBUG nova.scheduler.client.report [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.950913] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.951164] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.951703] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore1] 4fb5a384-0792-40df-b361-0784397a897f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.952240] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-401eb06d-264d-44a4-a322-f0432439e8c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.959063] env[68437]: DEBUG oslo_vmware.api [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc 
tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1141.959063] env[68437]: value = "task-2944915" [ 1141.959063] env[68437]: _type = "Task" [ 1141.959063] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.969138] env[68437]: DEBUG oslo_vmware.api [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944915, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.053019] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 86%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.133916] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778249} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.134326] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.134639] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.134929] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45a102bc-6ab3-448e-a924-1a7d6bc7b238 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.142300] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1142.142300] env[68437]: value = "task-2944916" [ 1142.142300] env[68437]: _type = "Task" [ 1142.142300] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.152365] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944916, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.177014] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1142.206149] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.206431] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.206589] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.206773] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.206913] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.207074] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.207295] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.207456] env[68437]: DEBUG nova.virt.hardware 
[None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.207626] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.207866] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.208156] env[68437]: DEBUG nova.virt.hardware [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.209202] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0560318a-de90-4f7f-9ace-670aee731f0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.217380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55f3ece-a370-4d98-8f5f-2e0326f93c93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.233055] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.239068] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Creating folder: Project (5245284a79bd451bad6eea4c6bf794ee). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1142.239251] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87576ecd-d445-4079-b284-d1e4aee14dd5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.252481] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Created folder: Project (5245284a79bd451bad6eea4c6bf794ee) in parent group-v590848. [ 1142.252699] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Creating folder: Instances. Parent ref: group-v591126. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1142.253214] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5378f0ce-d7b3-4374-a3ef-d48ea12614f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.264510] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Created folder: Instances in parent group-v591126. [ 1142.264750] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.264941] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.265434] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35a874f8-9e5a-45d3-9fe4-d3c118d3ffa4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.281836] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.281836] env[68437]: value = "task-2944919" [ 1142.281836] env[68437]: _type = "Task" [ 1142.281836] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.289013] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944919, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.355259] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944913, 'name': CreateVM_Task, 'duration_secs': 0.384494} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.355465] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.357078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.357078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.357078] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.357471] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28329425-e87d-4146-8f3b-7ff809cf9695 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.362608] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1142.362608] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52876403-4a0f-620a-818b-4c05f2c2f283" [ 1142.362608] env[68437]: _type = "Task" [ 1142.362608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.370864] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52876403-4a0f-620a-818b-4c05f2c2f283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.453386] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.453996] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1142.473303] env[68437]: DEBUG oslo_vmware.api [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195506} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.473580] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.473987] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.473987] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.474379] env[68437]: INFO nova.compute.manager [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1142.475040] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.475040] env[68437]: DEBUG nova.compute.manager [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1142.475040] env[68437]: DEBUG nova.network.neutron [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1142.534181] env[68437]: DEBUG nova.network.neutron [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Updated VIF entry in instance network info cache for port 7e45a17c-73eb-46e6-be1e-b49fc753eb9b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1142.534588] env[68437]: DEBUG nova.network.neutron [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Updating instance_info_cache with network_info: [{"id": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "address": "fa:16:3e:ff:aa:b8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e45a17c-73", "ovs_interfaceid": "7e45a17c-73eb-46e6-be1e-b49fc753eb9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.554973] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.656042] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096472} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.656042] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.656042] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da75b7b-f1cb-492f-8c39-e6d3544cc76e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.676280] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.676751] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21eea238-e350-4f41-b4a7-e510a9a81a9c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.701018] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1142.701018] env[68437]: value = "task-2944920" [ 1142.701018] env[68437]: _type = "Task" [ 1142.701018] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.707833] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944920, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.790824] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944919, 'name': CreateVM_Task, 'duration_secs': 0.343265} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.790979] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.791405] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.873797] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52876403-4a0f-620a-818b-4c05f2c2f283, 'name': SearchDatastore_Task, 'duration_secs': 0.033729} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.874315] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.874397] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1142.874665] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.874742] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.874865] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.875172] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.875697] 
env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.875924] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caa39ac4-1a04-4154-80d6-8ee4102b1a62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.877675] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ddb1b4f-bb8e-4cec-8a16-3e2ede2823d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.885179] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1142.885179] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526b47b4-3166-a10e-d1e5-f42693db55d9" [ 1142.885179] env[68437]: _type = "Task" [ 1142.885179] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.888825] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.889011] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.890559] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a0978a0-84aa-4c21-b6e9-9aedbe076f70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.896031] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526b47b4-3166-a10e-d1e5-f42693db55d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.899856] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1142.899856] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525de46e-e6e9-80a3-88c4-a178b73696f0" [ 1142.899856] env[68437]: _type = "Task" [ 1142.899856] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.901987] env[68437]: DEBUG nova.compute.manager [req-faa888f6-3fd5-4b8b-bac1-dfef26721d79 req-197b2392-cf03-4c97-8fb4-31d3af1b1cde service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Received event network-vif-deleted-b35125a2-2df8-4a1d-8e2a-7330ff771b84 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1142.902152] env[68437]: INFO nova.compute.manager [req-faa888f6-3fd5-4b8b-bac1-dfef26721d79 req-197b2392-cf03-4c97-8fb4-31d3af1b1cde service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Neutron deleted interface b35125a2-2df8-4a1d-8e2a-7330ff771b84; detaching it from the instance and deleting it from the info cache [ 1142.902363] env[68437]: DEBUG nova.network.neutron [req-faa888f6-3fd5-4b8b-bac1-dfef26721d79 req-197b2392-cf03-4c97-8fb4-31d3af1b1cde service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.911764] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525de46e-e6e9-80a3-88c4-a178b73696f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.960122] env[68437]: DEBUG nova.compute.utils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1142.961608] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Not allocating networking since 'none' was specified. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1143.037031] env[68437]: DEBUG oslo_concurrency.lockutils [req-da279499-d31b-4332-8f41-265b0bf33e0a req-ba246ed2-3ba8-4051-b85d-664c16958dcd service nova] Releasing lock "refresh_cache-76ed714d-7ffe-4a64-ae78-bab76ba1504a" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.054241] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.208530] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944920, 'name': ReconfigVM_Task, 'duration_secs': 0.342621} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.208890] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 4abf1477-2f0e-4a13-884a-c19420b3e435/4abf1477-2f0e-4a13-884a-c19420b3e435.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.209554] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f269f112-045d-45d5-bfde-08417786bd38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.214852] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1143.214852] env[68437]: value = "task-2944921" [ 1143.214852] env[68437]: _type = "Task" [ 1143.214852] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.221675] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944921, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.381642] env[68437]: DEBUG nova.network.neutron [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.398971] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526b47b4-3166-a10e-d1e5-f42693db55d9, 'name': SearchDatastore_Task, 'duration_secs': 0.015536} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.399399] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.399721] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.400013] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.407787] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28bb017c-4a1e-4acf-9f27-5dcfb066eb45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.414971] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525de46e-e6e9-80a3-88c4-a178b73696f0, 'name': SearchDatastore_Task, 'duration_secs': 0.015594} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.416394] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2c21c9f-b721-46d6-bd41-1556a1a33105 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.423743] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fec5b54-9db1-40f2-998a-fb05eecbbd72 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.434889] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1143.434889] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d9cb38-a5ec-cea6-7d89-fabc1ec9d6fe" [ 1143.434889] env[68437]: _type = "Task" [ 1143.434889] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.442370] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d9cb38-a5ec-cea6-7d89-fabc1ec9d6fe, 'name': SearchDatastore_Task, 'duration_secs': 0.009866} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.442587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.442830] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76ed714d-7ffe-4a64-ae78-bab76ba1504a/76ed714d-7ffe-4a64-ae78-bab76ba1504a.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.443112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.443292] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.443481] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-639ef582-76ae-4f98-9482-6d565be0f6e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.445088] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b150aa52-22ab-407c-8a22-e16f9dd5282c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.456825] env[68437]: DEBUG nova.compute.manager [req-faa888f6-3fd5-4b8b-bac1-dfef26721d79 req-197b2392-cf03-4c97-8fb4-31d3af1b1cde service nova] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Detach interface failed, port_id=b35125a2-2df8-4a1d-8e2a-7330ff771b84, reason: Instance 4fb5a384-0792-40df-b361-0784397a897f could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1143.461594] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1143.461594] env[68437]: value = "task-2944922" [ 1143.461594] env[68437]: _type = "Task" [ 1143.461594] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.462074] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1143.471927] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.477011] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.477214] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.477917] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bd3f41a-0fcf-49b3-945b-b8bd7f97e3cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.482479] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1143.482479] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52de7e9f-3a29-9821-188a-e5be9c87b72e" [ 1143.482479] env[68437]: _type = "Task" [ 1143.482479] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.490897] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52de7e9f-3a29-9821-188a-e5be9c87b72e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.554608] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944908, 'name': RelocateVM_Task, 'duration_secs': 3.03209} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.554971] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1143.555180] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591112', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'name': 'volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98ff24-e9d1-4754-89d2-ee2daa54ad47', 'attached_at': '', 'detached_at': '', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'serial': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1143.556105] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2141a144-8377-4163-b20a-349c8ac366fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.572808] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa78804b-af66-4f77-95be-aed60fbb8038 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.593929] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3/volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.594227] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a4dc10f-60e6-4824-a2e2-f60b38bb18b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.612915] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1143.612915] env[68437]: value = "task-2944923" [ 1143.612915] env[68437]: _type = "Task" [ 1143.612915] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.620354] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944923, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.725334] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944921, 'name': Rename_Task, 'duration_secs': 0.134877} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.725612] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.725859] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-404f1725-6fef-4fb7-aa1e-e294cf32d96f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.733706] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1143.733706] env[68437]: value = "task-2944924" [ 1143.733706] env[68437]: _type = "Task" [ 1143.733706] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.744088] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.833033] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1143.833366] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1143.833551] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1143.833744] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1143.833907] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1143.834076] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1143.834304] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1143.834467] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1143.834641] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1143.834804] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1143.834981] env[68437]: DEBUG nova.virt.hardware [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1143.835904] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da8235b-e1ef-417a-8f9e-02fb6993aa3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.845490] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0793b1-134b-4015-a37a-2a8f2878e060 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.859596] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:fb:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '520c7db4-23e9-44bf-846b-9f1eb94579f7', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1143.867305] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.867648] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1143.867886] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cab130aa-bcd4-4918-a8d4-61ff2238a015 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.884574] env[68437]: INFO nova.compute.manager [-] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Took 1.41 seconds to deallocate network for instance. [ 1143.890810] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1143.890810] env[68437]: value = "task-2944925" [ 1143.890810] env[68437]: _type = "Task" [ 1143.890810] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.900351] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944925, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.977262] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944922, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.995628] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52de7e9f-3a29-9821-188a-e5be9c87b72e, 'name': SearchDatastore_Task, 'duration_secs': 0.051346} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.996515] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb33369c-cd42-44f4-a1da-5c50c8c1e6e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.003498] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1144.003498] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521708d3-7892-6fe8-e9a4-a43a8366a738" [ 1144.003498] env[68437]: _type = "Task" [ 1144.003498] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.014987] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521708d3-7892-6fe8-e9a4-a43a8366a738, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.122486] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944923, 'name': ReconfigVM_Task, 'duration_secs': 0.410528} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.122701] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3/volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.127435] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a58eb5ae-945d-4c30-83bc-44ef85eb6498 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.142887] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1144.142887] env[68437]: value = "task-2944926" [ 1144.142887] env[68437]: _type = "Task" [ 1144.142887] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.150564] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.245325] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944924, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.392530] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.392866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.393249] env[68437]: DEBUG nova.objects.instance [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'resources' on Instance uuid 4fb5a384-0792-40df-b361-0784397a897f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.404600] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944925, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.472101] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637814} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.472384] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76ed714d-7ffe-4a64-ae78-bab76ba1504a/76ed714d-7ffe-4a64-ae78-bab76ba1504a.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1144.472623] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1144.473692] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1144.475637] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85d49409-74b6-4b83-9b52-96ec7d4fec82 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.481517] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1144.481517] env[68437]: value = "task-2944927" [ 1144.481517] env[68437]: _type = "Task" [ 1144.481517] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.489431] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.498065] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1144.498300] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1144.498458] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1144.498638] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1144.498784] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1144.498930] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 
tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1144.499220] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1144.499391] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1144.499558] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1144.499720] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1144.499893] env[68437]: DEBUG nova.virt.hardware [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1144.500680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde0534c-b245-4233-bd8a-49b286a038cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.510458] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad9fcb4-4f16-401f-8f91-e912149fc1d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.517291] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521708d3-7892-6fe8-e9a4-a43a8366a738, 'name': SearchDatastore_Task, 'duration_secs': 0.068833} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.517821] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.518094] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b4257b79-2723-43fd-b64f-74104802e048/b4257b79-2723-43fd-b64f-74104802e048.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.518322] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9751c5e3-0d38-46d5-8fc2-6d547bf1fdfd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.527687] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1144.533105] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.533938] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1144.534187] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd09afe8-36ba-49c2-95c4-db254cb6877f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.547317] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1144.547317] env[68437]: value = "task-2944928" [ 1144.547317] env[68437]: _type = "Task" [ 1144.547317] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.552035] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1144.552035] env[68437]: value = "task-2944929" [ 1144.552035] env[68437]: _type = "Task" [ 1144.552035] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.557596] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.561864] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944929, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.652450] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944926, 'name': ReconfigVM_Task, 'duration_secs': 0.129466} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.652690] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591112', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'name': 'volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98ff24-e9d1-4754-89d2-ee2daa54ad47', 'attached_at': '', 'detached_at': '', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'serial': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1144.653281] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b72b944-810a-4288-b3f8-5be6fcbf9c22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.659678] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1144.659678] env[68437]: value = "task-2944930" [ 1144.659678] env[68437]: _type = "Task" [ 1144.659678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.668703] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944930, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.744859] env[68437]: DEBUG oslo_vmware.api [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2944924, 'name': PowerOnVM_Task, 'duration_secs': 0.656559} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.745161] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.745389] env[68437]: DEBUG nova.compute.manager [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.746149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68146543-e82b-4f9c-8c32-bda43934c3a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.909913] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944925, 'name': CreateVM_Task, 'duration_secs': 0.565338} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.910080] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1144.910765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.910939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.911385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1144.911606] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-387be16f-ab6a-439c-b61a-6dc56b6837e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.918416] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1144.918416] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523e55d0-4b27-d607-8db0-a17a1df5a2db" [ 1144.918416] env[68437]: _type = "Task" [ 1144.918416] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.931288] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523e55d0-4b27-d607-8db0-a17a1df5a2db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.991949] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072751} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.995657] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.996784] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c2dd2b-4a08-40cb-a42a-f055149f167f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.019549] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 76ed714d-7ffe-4a64-ae78-bab76ba1504a/76ed714d-7ffe-4a64-ae78-bab76ba1504a.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.022638] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e99380b-07d8-432a-826c-27ca98ccc2cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.048239] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1145.048239] env[68437]: value = "task-2944931" [ 1145.048239] env[68437]: _type = "Task" [ 1145.048239] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.067782] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.072164] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944928, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.081048] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944929, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.158613] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6687830a-449f-4f3d-b8e6-17d0b7eddf57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.171317] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf197163-e085-4a78-a653-307b0825bf43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.174350] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944930, 'name': Rename_Task, 'duration_secs': 0.247877} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.174608] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.175119] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fbfbc46-b905-4d94-9887-faf102aa9dbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.206228] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd080e96-52d7-4b77-b9d1-a8e105a52f91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.208569] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1145.208569] env[68437]: value = "task-2944932" [ 1145.208569] env[68437]: _type = "Task" [ 1145.208569] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.215524] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89432f9-843c-4afb-985c-17de766bdcc4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.222233] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944932, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.232013] env[68437]: DEBUG nova.compute.provider_tree [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.262675] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.429164] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523e55d0-4b27-d607-8db0-a17a1df5a2db, 'name': SearchDatastore_Task, 'duration_secs': 0.06286} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.429483] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.429714] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1145.429944] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.430106] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.430295] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1145.430545] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1b52fd48-057e-4396-b9a3-521169eebf31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.438751] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1145.438951] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1145.439598] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-410db99e-6a9b-4c72-8070-a975bdd4fe74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.444314] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1145.444314] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ed2065-7d96-f4cf-4dba-a5f092693119" [ 1145.444314] env[68437]: _type = "Task" [ 1145.444314] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.451709] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ed2065-7d96-f4cf-4dba-a5f092693119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.560710] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944928, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576167} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.566383] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] b4257b79-2723-43fd-b64f-74104802e048/b4257b79-2723-43fd-b64f-74104802e048.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.566605] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.566853] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.567076] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36e5f6fe-a6eb-4c8e-935b-6a610d99d42f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.573043] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944929, 'name': CreateVM_Task, 'duration_secs': 0.560038} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.574030] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1145.574345] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1145.574345] env[68437]: value = "task-2944933" [ 1145.574345] env[68437]: _type = "Task" [ 1145.574345] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.574691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.574847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1145.575191] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1145.575462] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ebb41fb-af59-4151-9605-72902908a27f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.582939] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1145.582939] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52760efd-4709-8b81-c6d0-aa4c6791f708" [ 1145.582939] env[68437]: _type = "Task" [ 1145.582939] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.586069] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944933, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.592858] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52760efd-4709-8b81-c6d0-aa4c6791f708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.719318] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944932, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.735359] env[68437]: DEBUG nova.scheduler.client.report [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1145.956363] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ed2065-7d96-f4cf-4dba-a5f092693119, 'name': SearchDatastore_Task, 'duration_secs': 0.009322} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.957189] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc16390-597e-40ca-b535-762681621dd6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.963088] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1145.963088] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5245053c-81fb-d14e-c2c7-b5eb6e90af1a" [ 1145.963088] env[68437]: _type = "Task" [ 1145.963088] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.970581] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5245053c-81fb-d14e-c2c7-b5eb6e90af1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.059535] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944931, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.083867] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944933, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063472} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.084294] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1146.084980] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd92c058-bd7d-45bb-96ed-2df79bfd4831 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.095654] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52760efd-4709-8b81-c6d0-aa4c6791f708, 'name': SearchDatastore_Task, 'duration_secs': 0.011049} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.102565] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.102820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.103069] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.111535] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] b4257b79-2723-43fd-b64f-74104802e048/b4257b79-2723-43fd-b64f-74104802e048.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1146.111803] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6bdef27-5191-4c20-b7a6-1c1ae6b7effc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.132075] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1146.132075] env[68437]: value = "task-2944934" [ 1146.132075] env[68437]: _type = "Task" [ 1146.132075] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.140781] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944934, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.218959] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944932, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.240043] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.847s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.243284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.982s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.243284] env[68437]: DEBUG nova.objects.instance [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1146.258955] env[68437]: INFO nova.scheduler.client.report [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 4fb5a384-0792-40df-b361-0784397a897f [ 1146.474891] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5245053c-81fb-d14e-c2c7-b5eb6e90af1a, 'name': SearchDatastore_Task, 'duration_secs': 0.010398} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.475103] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.475389] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1146.475742] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.475941] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.476183] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14249340-1c22-4974-b0ca-55400bdb6c7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.478067] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1fe5522-4579-46af-a815-0b75e6355232 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.484714] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1146.484714] env[68437]: value = "task-2944935" [ 1146.484714] env[68437]: _type = "Task" [ 1146.484714] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.492014] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944935, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.493093] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.493324] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1146.493964] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c61c0b9-5158-4bb8-81fc-f0b255576227 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.498482] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1146.498482] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526f07b0-afb9-6f4e-71d1-f3d523ad9b52" [ 1146.498482] env[68437]: _type = "Task" [ 1146.498482] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.505775] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526f07b0-afb9-6f4e-71d1-f3d523ad9b52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.559692] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944931, 'name': ReconfigVM_Task, 'duration_secs': 1.195201} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.559958] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 76ed714d-7ffe-4a64-ae78-bab76ba1504a/76ed714d-7ffe-4a64-ae78-bab76ba1504a.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.560615] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35d028d6-4390-49dc-bf19-5648e519d6f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.567404] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1146.567404] env[68437]: value = "task-2944936" [ 1146.567404] env[68437]: _type = "Task" [ 1146.567404] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.578799] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944936, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.642392] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944934, 'name': ReconfigVM_Task, 'duration_secs': 0.479845} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.642663] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Reconfigured VM instance instance-00000067 to attach disk [datastore1] b4257b79-2723-43fd-b64f-74104802e048/b4257b79-2723-43fd-b64f-74104802e048.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.643336] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-10d2c791-0820-4fa9-a4f9-609f5d28fb72 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.650261] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1146.650261] env[68437]: value = "task-2944937" [ 1146.650261] env[68437]: _type = "Task" [ 1146.650261] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.660040] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944937, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.720180] env[68437]: DEBUG oslo_vmware.api [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2944932, 'name': PowerOnVM_Task, 'duration_secs': 1.313194} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.720470] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.720684] env[68437]: INFO nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Took 7.58 seconds to spawn the instance on the hypervisor. 
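The CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task / PowerOnVM_Task records above all follow the same oslo.vmware pattern: invoke a vSphere *_Task method, then poll the task object until it reports success, logging the progress percentage on each poll (the "progress is N%" and "completed successfully" lines). Below is a minimal, self-contained sketch of that loop; FakeTaskInfo and this wait_for_task are illustrative stand-ins for demonstration only, not the oslo_vmware.api.VMwareAPISession implementation referenced in these records.

    import time

    class FakeTaskInfo:
        """Stand-in for a vSphere TaskInfo: yields (state, progress) pairs."""
        def __init__(self, states):
            self._states = iter(states)

        def poll(self):
            return next(self._states)

    def wait_for_task(task_info, poll_interval=0.5):
        """Poll until the task reaches 'success', printing progress each pass
        (a simplified analogue of the wait_for_task/_poll_task lines above)."""
        while True:
            state, progress = task_info.poll()
            if state == 'running':
                print(f"progress is {progress}%")
                time.sleep(poll_interval)
                continue
            if state == 'success':
                print("completed successfully")
                return progress
            raise RuntimeError(f"task ended in state {state}")

    if __name__ == '__main__':
        # Mirrors the PowerOnVM_Task progression seen in the log: 66% -> 88% -> done.
        wait_for_task(FakeTaskInfo([('running', 0), ('running', 66),
                                    ('running', 88), ('success', 100)]))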
[ 1146.720867] env[68437]: DEBUG nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.721787] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5e1878-e504-4427-aeff-b81556b8c420 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.474144] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f21f3039-7e59-4ae3-80d2-39c4c3ae1bfc tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "4fb5a384-0792-40df-b361-0784397a897f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.124s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.493500] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944936, 'name': Rename_Task, 'duration_secs': 0.176212} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.501575] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.502475] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944935, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520721} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.502597] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944937, 'name': Rename_Task, 'duration_secs': 0.401282} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.502797] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526f07b0-afb9-6f4e-71d1-f3d523ad9b52, 'name': SearchDatastore_Task, 'duration_secs': 0.040338} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.502987] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc2b1e2d-3778-40a4-bec1-e75a2cea1c13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.504586] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.504799] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1147.505087] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.506014] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-631b7564-d216-44cb-b7ed-10d12c970ca8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.507874] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50cb32b5-a301-490e-acb2-4ea6291aed05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.509093] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d805fa0-47a5-4f42-be2d-d95ccccd908a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.515198] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1147.515198] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c25f93-cef0-259c-be0f-c542545dba82" [ 1147.515198] env[68437]: _type = "Task" [ 1147.515198] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.520805] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1147.520805] env[68437]: value = "task-2944939" [ 1147.520805] env[68437]: _type = "Task" [ 1147.520805] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.521052] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1147.521052] env[68437]: value = "task-2944938" [ 1147.521052] env[68437]: _type = "Task" [ 1147.521052] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.521275] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1147.521275] env[68437]: value = "task-2944940" [ 1147.521275] env[68437]: _type = "Task" [ 1147.521275] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.532992] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c25f93-cef0-259c-be0f-c542545dba82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.538868] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.546069] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944938, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.546262] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944940, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.976608] env[68437]: DEBUG oslo_concurrency.lockutils [None req-dce36a33-50db-4680-b5e9-c8f5d089c5c8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.734s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.996914] env[68437]: INFO nova.compute.manager [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Took 14.97 seconds to build instance. 
[ 1148.024818] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c25f93-cef0-259c-be0f-c542545dba82, 'name': SearchDatastore_Task, 'duration_secs': 0.016579} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.032865] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.033184] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1148.033479] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ca28247-0260-418b-8615-551be84d1f6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.041415] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081399} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.047753] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1148.048077] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944938, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.048324] env[68437]: DEBUG oslo_vmware.api [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944940, 'name': PowerOnVM_Task, 'duration_secs': 0.489352} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.048568] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1148.048568] env[68437]: value = "task-2944941" [ 1148.048568] env[68437]: _type = "Task" [ 1148.048568] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.049231] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2fba9f-142d-43fd-a499-9937bbef94d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.051556] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.051757] env[68437]: INFO nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Took 5.87 seconds to spawn the instance on the hypervisor. [ 1148.051931] env[68437]: DEBUG nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.052708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337c962e-86fc-4634-807a-1ae22bdf2133 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.082644] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.083205] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c4f5849-c9e3-418b-a1f7-5dcae2226b7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.101851] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1148.101851] env[68437]: value = "task-2944942" [ 1148.101851] env[68437]: _type = "Task" [ 1148.101851] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.111907] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944942, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.342050] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.342050] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.498808] env[68437]: DEBUG oslo_concurrency.lockutils [None req-731561e1-532e-4edc-a561-234e4024c106 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.476s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.535233] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944938, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.564570] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944941, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.598411] env[68437]: INFO nova.compute.manager [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Took 10.76 seconds to build instance. [ 1148.617351] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944942, 'name': ReconfigVM_Task, 'duration_secs': 0.323913} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.619660] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 76d97a56-21a2-4363-a987-ef872f056510/76d97a56-21a2-4363-a987-ef872f056510.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.623024] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'encryption_options': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'size': 0, 'encryption_format': None, 'disk_bus': None, 'image_id': 'a272f526-6b8d-4a29-bd06-cd29ab5fabbe'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '2ac203cb-56bf-464d-a014-e9ff04d2348e', 'device_type': None, 'mount_device': '/dev/sdb', 'boot_index': None, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'}, 'delete_on_termination': False, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68437) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1148.623024] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1148.623024] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1148.624866] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77915efc-b958-4561-b591-2b7d2c6198f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.643332] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2bc43b-78ec-428e-b443-e99122a90c7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.673724] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.673724] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e47d48-4486-43fc-ab21-abcc134d3e24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.694207] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1148.694207] env[68437]: value = "task-2944943" [ 1148.694207] env[68437]: _type = "Task" [ 1148.694207] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.705354] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944943, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.848032] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1149.035776] env[68437]: DEBUG oslo_vmware.api [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944938, 'name': PowerOnVM_Task, 'duration_secs': 1.049153} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.036155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.036456] env[68437]: INFO nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1149.037025] env[68437]: DEBUG nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1149.040514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a58284-931a-44aa-a593-5d9248cd5c1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.045776] env[68437]: DEBUG nova.compute.manager [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Received event network-changed-9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1149.046151] env[68437]: DEBUG nova.compute.manager [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Refreshing instance network info cache due to event network-changed-9c369f5b-7c68-4b57-8c1e-cc2b007af652. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1149.046491] env[68437]: DEBUG oslo_concurrency.lockutils [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] Acquiring lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.046776] env[68437]: DEBUG oslo_concurrency.lockutils [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] Acquired lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.047110] env[68437]: DEBUG nova.network.neutron [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Refreshing network info cache for port 9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1149.064638] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.845739} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.064975] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1149.065334] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1149.065594] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9164cef2-562d-4313-915b-083809accb1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.072263] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1149.072263] env[68437]: value = "task-2944944" [ 1149.072263] env[68437]: _type = "Task" [ 1149.072263] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.080970] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944944, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.102889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b06dd591-6711-4fbf-8a66-c2fd4f37bb94 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.266s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.204878] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944943, 'name': ReconfigVM_Task, 'duration_secs': 0.368428} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.205085] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to attach disk [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.209920] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87e0aef5-0569-4fbb-969d-a2243f855a4a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.224942] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1149.224942] env[68437]: value = "task-2944945" [ 1149.224942] env[68437]: _type = "Task" [ 1149.224942] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.233645] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944945, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.370192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.370520] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.373380] env[68437]: INFO nova.compute.claims [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1149.565420] env[68437]: INFO nova.compute.manager [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Took 14.10 seconds to build instance. [ 1149.583261] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078622} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.584258] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1149.585146] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bed3042-6e73-4fe1-8b46-1c551ef41244 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.606030] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1149.608137] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de01fea4-2d3e-4de6-b7c8-9c683355fa62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.628942] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1149.628942] env[68437]: value = "task-2944946" [ 1149.628942] env[68437]: _type = "Task" [ 1149.628942] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.638253] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944946, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.735457] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944945, 'name': ReconfigVM_Task, 'duration_secs': 0.141434} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.736162] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1149.736833] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26997339-821e-4977-9adf-e7bb07a6034d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.744014] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1149.744014] env[68437]: value = "task-2944947" [ 1149.744014] env[68437]: _type = "Task" [ 1149.744014] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.754914] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944947, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.846255] env[68437]: DEBUG nova.network.neutron [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updated VIF entry in instance network info cache for port 9c369f5b-7c68-4b57-8c1e-cc2b007af652. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1149.846603] env[68437]: DEBUG nova.network.neutron [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updating instance_info_cache with network_info: [{"id": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "address": "fa:16:3e:d6:01:fa", "network": {"id": "1d1d8521-0bd8-413f-a787-780c6555502e", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-525691127-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "508b51afac37437f82818180bd9c1c3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c369f5b-7c", "ovs_interfaceid": "9c369f5b-7c68-4b57-8c1e-cc2b007af652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.067579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-991345d8-8404-44ee-a10b-39ed64b21187 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.608s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.139267] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.255749] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944947, 'name': Rename_Task, 'duration_secs': 0.157478} completed successfully. 
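The instance_info_cache payload above is the usual Nova network_info model: one entry per VIF, each carrying its network, subnets, fixed IPs and any attached floating IPs. A small sketch that walks a trimmed copy of the logged structure (only the fields used here are kept):

# Trimmed copy of the network_info entry logged for instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47.
network_info = [{
    "id": "9c369f5b-7c68-4b57-8c1e-cc2b007af652",
    "address": "fa:16:3e:d6:01:fa",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.10", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.184", "type": "floating"}]}],
    }]},
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip.get("floating_ips", [])]
            print(f"port {vif['id']}: fixed {ip['address']}, floating {floating or 'none'}")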
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.256011] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1150.256275] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48c34be3-7cf5-4d2c-8139-66deb4f0ddda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.263309] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1150.263309] env[68437]: value = "task-2944948" [ 1150.263309] env[68437]: _type = "Task" [ 1150.263309] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.270574] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.349524] env[68437]: DEBUG oslo_concurrency.lockutils [req-66440ec6-1823-40c0-931a-f30cf2e3524c req-36e41292-c666-48d0-8694-696a1e91797e service nova] Releasing lock "refresh_cache-9b98ff24-e9d1-4754-89d2-ee2daa54ad47" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.398849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.399201] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.399491] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.399757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.399950] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.404411] env[68437]: INFO nova.compute.manager [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Terminating instance [ 1150.578858] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5a25dc-1d04-4063-b5be-740c63179327 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.587185] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468393b9-e6ba-4999-9d5e-f797b9452885 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.618901] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7124858-032c-4801-968e-6f8ad0bd3293 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.627550] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e49fa6-20cc-4f0a-bc09-acd96131854a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.644132] env[68437]: DEBUG nova.compute.provider_tree [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.648791] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944946, 'name': ReconfigVM_Task, 'duration_secs': 1.006863} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.649345] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1150.649900] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78bd400f-d910-424f-9971-1c4789280a8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.657650] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1150.657650] env[68437]: value = "task-2944949" [ 1150.657650] env[68437]: _type = "Task" [ 1150.657650] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.666199] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944949, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.773638] env[68437]: DEBUG oslo_vmware.api [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2944948, 'name': PowerOnVM_Task, 'duration_secs': 0.462056} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.773907] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.774245] env[68437]: DEBUG nova.compute.manager [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.775016] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde91a06-9516-4821-bc77-3a56a0d35357 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.909012] env[68437]: DEBUG nova.compute.manager [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1150.909214] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1150.910109] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8edb9f-cba8-4f15-92b4-e8a7194335ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.917971] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.918215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d58d3c2c-4c14-4876-a3ef-2efb687b3d20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.924874] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1150.924874] env[68437]: value = "task-2944950" [ 1150.924874] env[68437]: _type = "Task" [ 1150.924874] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.932719] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.151210] env[68437]: DEBUG nova.scheduler.client.report [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1151.168632] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944949, 'name': Rename_Task, 'duration_secs': 0.164946} completed successfully. 
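The inventory payload reported for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 above is what bounds how much the scheduler can place on this node. A quick sketch of the capacity arithmetic, assuming placement's usual formula of (total - reserved) * allocation_ratio:

# Inventory as logged for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 155},
}

for rc, inv in inventory.items():
    # Assumed capacity formula: (total - reserved) * allocation_ratio,
    # with max_unit capping any single allocation.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} allocatable, at most {inv['max_unit']} per instance")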
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.169858] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1151.170284] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50e60aab-2928-48e4-b121-42f72a229f50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.177829] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1151.177829] env[68437]: value = "task-2944951" [ 1151.177829] env[68437]: _type = "Task" [ 1151.177829] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.185360] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.290859] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.436940] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944950, 'name': PowerOffVM_Task, 'duration_secs': 0.392536} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.437204] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.437386] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.437640] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d756262-082c-4a50-9974-1d7513614cb5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.513142] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1151.513697] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1151.513850] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore1] 76ed714d-7ffe-4a64-ae78-bab76ba1504a {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.514232] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7730c7c4-6f9a-4ff3-ace3-32e4f7fe7558 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.521255] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1151.521255] env[68437]: value = "task-2944953" [ 1151.521255] env[68437]: _type = "Task" [ 1151.521255] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.530838] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944953, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.656869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.657476] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1151.660177] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.369s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.660363] env[68437]: DEBUG nova.objects.instance [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1151.688789] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944951, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.033111] env[68437]: DEBUG oslo_vmware.api [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24704} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.033271] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.033340] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.033502] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.033675] env[68437]: INFO nova.compute.manager [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1152.033914] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.034146] env[68437]: DEBUG nova.compute.manager [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.034585] env[68437]: DEBUG nova.network.neutron [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1152.164543] env[68437]: DEBUG nova.compute.utils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1152.168686] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Allocating IP information in the background. 
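The teardown of 76ed714d-7ffe-4a64-ae78-bab76ba1504a above follows a fixed order: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports. A compact sketch of that ordering with hypothetical helpers (the real driver routes these through vmops, ds_util and the Neutron API):

def destroy_instance_sketch(power_off, unregister_vm, delete_datastore_dir, deallocate_network):
    """Mirror the destroy ordering seen in the log; all four callables are hypothetical."""
    power_off()               # PowerOffVM_Task
    unregister_vm()           # UnregisterVM
    delete_datastore_dir()    # DeleteDatastoreFile_Task on [datastore1] <instance uuid>
    deallocate_network()      # deallocate_for_instance() against Neutron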
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1152.168859] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1152.191595] env[68437]: DEBUG oslo_vmware.api [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944951, 'name': PowerOnVM_Task, 'duration_secs': 0.757471} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.193068] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1152.193068] env[68437]: INFO nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1152.193068] env[68437]: DEBUG nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1152.193629] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb3cab2-4cc1-49a9-97a6-791b535cb246 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.209959] env[68437]: DEBUG nova.policy [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1152.303802] env[68437]: DEBUG nova.compute.manager [req-d6d2f714-a8e5-4f51-83ad-27118333034d req-ca224e2a-7153-4a3a-ac6a-3bb2d5d6bcba service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Received event network-vif-deleted-7e45a17c-73eb-46e6-be1e-b49fc753eb9b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1152.303933] env[68437]: INFO nova.compute.manager [req-d6d2f714-a8e5-4f51-83ad-27118333034d req-ca224e2a-7153-4a3a-ac6a-3bb2d5d6bcba service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Neutron deleted interface 7e45a17c-73eb-46e6-be1e-b49fc753eb9b; detaching it from the instance and deleting it from the info cache [ 1152.304150] env[68437]: DEBUG 
nova.network.neutron [req-d6d2f714-a8e5-4f51-83ad-27118333034d req-ca224e2a-7153-4a3a-ac6a-3bb2d5d6bcba service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.485741] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Successfully created port: f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.669930] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1152.673622] env[68437]: DEBUG oslo_concurrency.lockutils [None req-60e5e419-6d84-433e-bf58-b05859ce0124 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.722662] env[68437]: INFO nova.compute.manager [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Took 14.67 seconds to build instance. [ 1152.784130] env[68437]: DEBUG nova.network.neutron [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.806626] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3397a67-0c51-4bdc-aad4-c4af311a5af7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.820504] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0f4431-1e2a-4cbe-a80e-d38d66266bb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.861727] env[68437]: DEBUG nova.compute.manager [req-d6d2f714-a8e5-4f51-83ad-27118333034d req-ca224e2a-7153-4a3a-ac6a-3bb2d5d6bcba service nova] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Detach interface failed, port_id=7e45a17c-73eb-46e6-be1e-b49fc753eb9b, reason: Instance 76ed714d-7ffe-4a64-ae78-bab76ba1504a could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1153.225336] env[68437]: DEBUG oslo_concurrency.lockutils [None req-59d04aa2-875e-43a6-8387-6b916c969d70 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.185s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.287088] env[68437]: INFO nova.compute.manager [-] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Took 1.25 seconds to deallocate network for instance. [ 1153.315708] env[68437]: INFO nova.compute.manager [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Rebuilding instance [ 1153.357667] env[68437]: DEBUG nova.compute.manager [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.358738] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1c9741-1f68-432d-91be-53faa4b3886f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.680664] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1153.704049] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1153.704326] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.704491] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1153.704672] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.704820] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1153.704986] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1153.705250] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1153.705421] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1153.705613] env[68437]: DEBUG nova.virt.hardware [None 
req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1153.705779] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1153.705965] env[68437]: DEBUG nova.virt.hardware [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1153.706897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc3eeba-dee9-4cd6-819c-b0dbcf288a8a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.715564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccaa5a9-cb9e-4da9-92f5-3f401050478a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.795140] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.795436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.795658] env[68437]: DEBUG nova.objects.instance [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid 76ed714d-7ffe-4a64-ae78-bab76ba1504a {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.076042] env[68437]: DEBUG nova.compute.manager [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Received event network-vif-plugged-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1154.076209] env[68437]: DEBUG oslo_concurrency.lockutils [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 service nova] Acquiring lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.076378] env[68437]: DEBUG oslo_concurrency.lockutils [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 
service nova] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.076550] env[68437]: DEBUG oslo_concurrency.lockutils [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 service nova] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.076720] env[68437]: DEBUG nova.compute.manager [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] No waiting events found dispatching network-vif-plugged-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1154.076886] env[68437]: WARNING nova.compute.manager [req-f50a5fc4-3380-47a2-b594-2853e255778a req-4a66d9b3-cf85-4148-9404-79b8fd9581b7 service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Received unexpected event network-vif-plugged-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf for instance with vm_state building and task_state spawning. [ 1154.162343] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Successfully updated port: f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1154.372466] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.372785] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-088f4ab0-4efe-4667-b8c3-d68f8e825fe8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.379801] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1154.379801] env[68437]: value = "task-2944954" [ 1154.379801] env[68437]: _type = "Task" [ 1154.379801] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.388390] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944954, 'name': PowerOffVM_Task} progress is 0%. 
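The nova.virt.hardware lines above show how Nova arrives at a 1:1:1 topology for the single-vCPU m1.nano flavor: every (sockets, cores, threads) combination whose product equals the vCPU count and that stays within the 65536 limits is a candidate, and here only (1, 1, 1) qualifies. A simplified sketch of that enumeration (not the actual nova.virt.hardware code):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is exactly vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    return [
        (s, c, t)
        for s, c, t in product(divisors, repeat=3)
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads
    ]

print(possible_topologies(1))   # [(1, 1, 1)], matching "Got 1 possible topologies"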
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.488545] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb5afe0-5f49-4af5-b0bd-1e431297c0a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.496701] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3405cac-dc02-456c-b7b7-71ca8256ddf9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.526242] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc820e52-10af-4fc7-9c8c-daac786349fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.533693] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1562d07-1fb7-40f9-b5f9-1204bfaea5c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.547040] env[68437]: DEBUG nova.compute.provider_tree [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.666738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.666738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.666738] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1154.889925] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944954, 'name': PowerOffVM_Task, 'duration_secs': 0.194444} completed successfully. 
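The flavor and image metadata dumped at the start of this spawn (m1.nano with 1 vCPU, 192 MB RAM and a 1 GB root disk; an image with min_disk=0 and min_ram=0) are the inputs the driver validates before building the VM. A minimal fit check under the assumption that root_gb must cover min_disk and memory_mb must cover min_ram:

flavor = {"vcpus": 1, "memory_mb": 192, "root_gb": 1}                  # m1.nano, from the log
image = {"min_disk": 0, "min_ram": 0, "size": 21318656, "disk_format": "vmdk"}

def image_fits_flavor(image, flavor):
    # Assumed checks: the flavor's root disk and RAM must meet the image minimums.
    return flavor["root_gb"] >= image["min_disk"] and flavor["memory_mb"] >= image["min_ram"]

print(image_fits_flavor(image, flavor))   # True for this build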
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.890196] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1154.890428] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1154.891176] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ab0c24-b630-4c2d-a6e4-2ac95ab21e03 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.897642] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1154.897851] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0e3620c-abe8-4f50-9f4a-3b6c2f670e94 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.921721] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1154.921888] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1154.922075] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleting the datastore file [datastore1] 6fabc758-0d56-4adb-a54e-b9c8798a0151 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1154.922332] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cda87bf-e1a5-41e6-b657-1bbb84635a25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.930181] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1154.930181] env[68437]: value = "task-2944956" [ 1154.930181] env[68437]: _type = "Task" [ 1154.930181] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.937687] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.050375] env[68437]: DEBUG nova.scheduler.client.report [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.198087] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1155.320094] env[68437]: DEBUG nova.network.neutron [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Updating instance_info_cache with network_info: [{"id": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "address": "fa:16:3e:bc:0d:3a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf846ab07-2c", "ovs_interfaceid": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.439637] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.483106} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.439926] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1155.440088] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1155.440271] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1155.555856] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.574553] env[68437]: INFO nova.scheduler.client.report [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance 76ed714d-7ffe-4a64-ae78-bab76ba1504a [ 1155.822736] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.823234] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Instance network_info: |[{"id": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "address": "fa:16:3e:bc:0d:3a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf846ab07-2c", "ovs_interfaceid": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1155.823667] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:0d:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.831180] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1155.831409] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1155.831635] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-065fdcb9-b912-4179-9caa-8e4a894adc4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.851608] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.851608] env[68437]: value = "task-2944957" [ 1155.851608] env[68437]: _type = "Task" [ 1155.851608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.858993] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944957, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.031841] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.032148] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.032375] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.032759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.032759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.034978] env[68437]: INFO nova.compute.manager [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Terminating instance [ 1156.081266] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9ef0c522-4b92-487f-b61d-234207d51c52 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "76ed714d-7ffe-4a64-ae78-bab76ba1504a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.682s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.103071] env[68437]: DEBUG nova.compute.manager [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Received event network-changed-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1156.103288] env[68437]: DEBUG 
nova.compute.manager [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Refreshing instance network info cache due to event network-changed-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1156.103505] env[68437]: DEBUG oslo_concurrency.lockutils [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] Acquiring lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.103633] env[68437]: DEBUG oslo_concurrency.lockutils [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] Acquired lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.103792] env[68437]: DEBUG nova.network.neutron [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Refreshing network info cache for port f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1156.363106] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944957, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.474797] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1156.475116] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.475278] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1156.475545] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.475700] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1156.475846] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1156.476076] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1156.476317] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1156.476507] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1156.476676] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1156.476853] env[68437]: DEBUG nova.virt.hardware [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1156.477720] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51bd5db-afff-4976-afee-e64048bd8cfa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.486224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5f0487-83b9-4ebf-bd46-65f939a22044 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.502722] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance VIF info [] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1156.508305] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.508540] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1156.508742] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d871229-7a19-4df2-a8e2-786095664d38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.526321] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1156.526321] env[68437]: value = "task-2944958" [ 1156.526321] env[68437]: _type = "Task" [ 1156.526321] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.533775] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944958, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.538471] env[68437]: DEBUG nova.compute.manager [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.538672] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.539403] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f72252-929e-450b-9249-c6fcbb1836fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.546134] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.546362] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aafbd338-6af5-48ac-8e46-6b71b68964ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.552253] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1156.552253] env[68437]: value = "task-2944959" [ 1156.552253] env[68437]: _type = "Task" [ 1156.552253] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.559923] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944959, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.816607] env[68437]: DEBUG nova.network.neutron [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Updated VIF entry in instance network info cache for port f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1156.816996] env[68437]: DEBUG nova.network.neutron [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Updating instance_info_cache with network_info: [{"id": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "address": "fa:16:3e:bc:0d:3a", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf846ab07-2c", "ovs_interfaceid": "f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.863147] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944957, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.036078] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944958, 'name': CreateVM_Task, 'duration_secs': 0.282785} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.036235] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1157.036619] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.036784] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.037146] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1157.037373] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea8ca2df-b7db-4853-a3b5-eb218d81c099 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.041398] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1157.041398] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fb5c0c-04e4-09e4-9b4a-a07ae93177f6" [ 1157.041398] env[68437]: _type = "Task" [ 1157.041398] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.048422] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fb5c0c-04e4-09e4-9b4a-a07ae93177f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.059224] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944959, 'name': PowerOffVM_Task, 'duration_secs': 0.225399} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.059457] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1157.059625] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1157.059837] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e34b3396-3202-4679-be80-3726eb3492b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.120779] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.121043] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.121242] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleting the datastore file [datastore1] ea09a88a-d426-4af4-aa07-945ccfbf2a24 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.121507] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-683b62b1-7667-40cb-ab37-1baaeec54a0e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.128126] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1157.128126] env[68437]: value = "task-2944961" [ 1157.128126] env[68437]: _type = "Task" [ 1157.128126] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.136037] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944961, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.320312] env[68437]: DEBUG oslo_concurrency.lockutils [req-69ee02c0-3602-4e66-8069-8fe9bf35d942 req-c6952838-c78e-48fa-97a9-0aad6c0c5edb service nova] Releasing lock "refresh_cache-96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.363431] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944957, 'name': CreateVM_Task, 'duration_secs': 1.484324} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.363615] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1157.364420] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.484424] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.484650] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.553631] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fb5c0c-04e4-09e4-9b4a-a07ae93177f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009746} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.553941] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.554243] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.554496] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.554659] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.554810] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.555094] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.555395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1157.555623] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-456c3dcd-4503-4ffb-b270-e1806be6dcef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.557396] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87be8eaa-88a4-40ca-888c-0d9ebc3ca13c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.562533] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 
tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1157.562533] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527fcdb2-6d10-bb0e-8464-96ba2cb23f3f" [ 1157.562533] env[68437]: _type = "Task" [ 1157.562533] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.566257] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.566441] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.567386] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53db0c22-fa3f-4762-9d4c-f9ced5588fe6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.572231] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527fcdb2-6d10-bb0e-8464-96ba2cb23f3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.575010] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1157.575010] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5266ff28-4baf-acf0-2ed6-f303755c79b9" [ 1157.575010] env[68437]: _type = "Task" [ 1157.575010] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.581926] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266ff28-4baf-acf0-2ed6-f303755c79b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.637779] env[68437]: DEBUG oslo_vmware.api [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151919} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.638082] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.638306] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.638563] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.638754] env[68437]: INFO nova.compute.manager [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1157.638992] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.639199] env[68437]: DEBUG nova.compute.manager [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.639294] env[68437]: DEBUG nova.network.neutron [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1157.987590] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1158.074481] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527fcdb2-6d10-bb0e-8464-96ba2cb23f3f, 'name': SearchDatastore_Task, 'duration_secs': 0.009243} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.074784] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.075044] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1158.075242] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.083465] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266ff28-4baf-acf0-2ed6-f303755c79b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01669} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.084225] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-160c4ff8-be64-4e3b-b2b1-478a6b3406d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.088986] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1158.088986] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5246a87b-78dc-52bd-15c1-bcde7b12a70e" [ 1158.088986] env[68437]: _type = "Task" [ 1158.088986] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.096314] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5246a87b-78dc-52bd-15c1-bcde7b12a70e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.129627] env[68437]: DEBUG nova.compute.manager [req-4ec1fbca-e6c8-41a1-8624-db5b10de69f5 req-2f743750-26b7-413b-922b-cd1accac4ae2 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Received event network-vif-deleted-d8383639-3dc6-429a-84bb-d34c1c98e26d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1158.129627] env[68437]: INFO nova.compute.manager [req-4ec1fbca-e6c8-41a1-8624-db5b10de69f5 req-2f743750-26b7-413b-922b-cd1accac4ae2 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Neutron deleted interface d8383639-3dc6-429a-84bb-d34c1c98e26d; detaching it from the instance and deleting it from the info cache [ 1158.129734] env[68437]: DEBUG nova.network.neutron [req-4ec1fbca-e6c8-41a1-8624-db5b10de69f5 req-2f743750-26b7-413b-922b-cd1accac4ae2 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.510492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.510790] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.512331] env[68437]: INFO nova.compute.claims [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1158.555636] env[68437]: DEBUG nova.network.neutron [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.599319] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5246a87b-78dc-52bd-15c1-bcde7b12a70e, 'name': SearchDatastore_Task, 'duration_secs': 0.046171} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.599622] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.599882] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.600184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.600374] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.600594] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2037312-bcb8-4b12-b2c5-fcdc0e82ff8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.602765] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10fd39c2-6c76-47d8-b890-fbddf7fa551e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.613898] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1158.613898] env[68437]: value = "task-2944962" [ 1158.613898] env[68437]: _type = "Task" [ 1158.613898] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.614982] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.615174] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1158.616127] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80d88829-a118-4136-90fd-eaaea7105ae7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.620942] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.623668] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1158.623668] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52291a65-fd4d-a277-088d-3ce411c27470" [ 1158.623668] env[68437]: _type = "Task" [ 1158.623668] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.630500] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52291a65-fd4d-a277-088d-3ce411c27470, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.632392] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f9bd148-1482-428b-9552-373c85d93f84 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.639620] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d32c5f-94f8-4b92-ab67-d322ab3700e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.674501] env[68437]: DEBUG nova.compute.manager [req-4ec1fbca-e6c8-41a1-8624-db5b10de69f5 req-2f743750-26b7-413b-922b-cd1accac4ae2 service nova] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Detach interface failed, port_id=d8383639-3dc6-429a-84bb-d34c1c98e26d, reason: Instance ea09a88a-d426-4af4-aa07-945ccfbf2a24 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1159.057751] env[68437]: INFO nova.compute.manager [-] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Took 1.42 seconds to deallocate network for instance. [ 1159.121547] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442381} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.121768] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.121977] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.122246] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3907431-f265-4ca4-a34a-c5c6f0a11ace {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.132196] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52291a65-fd4d-a277-088d-3ce411c27470, 'name': SearchDatastore_Task, 'duration_secs': 0.008771} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.133883] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1159.133883] env[68437]: value = "task-2944963" [ 1159.133883] env[68437]: _type = "Task" [ 1159.133883] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.134174] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ecbbb0-c73c-47d1-8f3d-3f595414ca43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.143367] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1159.143367] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c51320-42c0-92c9-959b-882cfd196690" [ 1159.143367] env[68437]: _type = "Task" [ 1159.143367] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.146564] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944963, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.154589] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c51320-42c0-92c9-959b-882cfd196690, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.154839] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.155129] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f/96c7aa3f-9098-49fe-8f5f-c3c45110fb4f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1159.155383] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93e1ef8f-bd9b-4a07-85a7-74f016d294b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.161601] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1159.161601] env[68437]: value = "task-2944964" [ 1159.161601] env[68437]: _type = "Task" [ 1159.161601] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.169528] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.565944] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.645371] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944963, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065775} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.647804] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.648780] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d393b6-7088-4501-887b-1eef7ea938d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.668711] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.671289] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4b6b239-f9f0-4a9a-9ec2-f1783140a68d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.693397] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451027} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.694548] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f/96c7aa3f-9098-49fe-8f5f-c3c45110fb4f.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.694769] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.695078] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1159.695078] env[68437]: value = "task-2944965" [ 1159.695078] env[68437]: _type = "Task" [ 1159.695078] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.697288] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-037c53c4-32fc-4373-bb1d-00a09da8c86a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.707372] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944965, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.708223] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1159.708223] env[68437]: value = "task-2944966" [ 1159.708223] env[68437]: _type = "Task" [ 1159.708223] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.720970] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.762256] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521000a6-f385-47e3-a595-217d511fbfb6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.771053] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f88a1bf-cc15-4442-ac22-ef59ba3b4052 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.801455] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04815be0-e63f-4adc-9b56-b3f0ebc289a4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.809487] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236b94ad-45fb-456a-bfac-727207bbc3b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.823663] env[68437]: DEBUG nova.compute.provider_tree [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.212445] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944965, 'name': ReconfigVM_Task, 'duration_secs': 0.340947} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.216989] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 6fabc758-0d56-4adb-a54e-b9c8798a0151/6fabc758-0d56-4adb-a54e-b9c8798a0151.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.217846] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25644bbf-1774-43a3-89d1-a65644387275 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.226547] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083603} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.228160] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1160.228634] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1160.228634] env[68437]: value = "task-2944967" [ 1160.228634] env[68437]: _type = "Task" [ 1160.228634] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.229652] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9296d72a-f940-47bb-ac6b-2fbaaaa3a98e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.244074] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944967, 'name': Rename_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.274248] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f/96c7aa3f-9098-49fe-8f5f-c3c45110fb4f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.274665] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94c50985-2f28-40be-a082-ca00d4aab64c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.300483] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1160.300483] env[68437]: value = "task-2944968" [ 1160.300483] env[68437]: _type = "Task" [ 1160.300483] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.308363] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.326610] env[68437]: DEBUG nova.scheduler.client.report [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.742653] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944967, 'name': Rename_Task, 'duration_secs': 0.182504} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.743103] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1160.743411] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d3e2883-0525-4511-8910-f9821d6674da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.750313] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1160.750313] env[68437]: value = "task-2944969" [ 1160.750313] env[68437]: _type = "Task" [ 1160.750313] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.759589] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944969, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.809356] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944968, 'name': ReconfigVM_Task, 'duration_secs': 0.488861} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.809648] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f/96c7aa3f-9098-49fe-8f5f-c3c45110fb4f.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.810294] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ad479be-34c3-4884-abcd-d70082235bc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.816232] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1160.816232] env[68437]: value = "task-2944970" [ 1160.816232] env[68437]: _type = "Task" [ 1160.816232] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.823590] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944970, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.831476] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.832088] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1160.834866] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.269s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.835098] env[68437]: DEBUG nova.objects.instance [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'resources' on Instance uuid ea09a88a-d426-4af4-aa07-945ccfbf2a24 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.264252] env[68437]: DEBUG oslo_vmware.api [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944969, 'name': PowerOnVM_Task, 'duration_secs': 0.504037} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.264252] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.264252] env[68437]: DEBUG nova.compute.manager [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.265104] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12171963-1f5d-4cc2-93ad-7772543f3730 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.326362] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944970, 'name': Rename_Task, 'duration_secs': 0.175109} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.326636] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.326873] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97d1046a-9135-439b-a940-e9f8378339ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.332723] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1161.332723] env[68437]: value = "task-2944971" [ 1161.332723] env[68437]: _type = "Task" [ 1161.332723] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.337709] env[68437]: DEBUG nova.compute.utils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1161.341392] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1161.341664] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1161.348339] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944971, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.382388] env[68437]: DEBUG nova.policy [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1161.535012] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1703aea3-9927-4ad6-ac9f-5a2ac7df5e49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.543562] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86fcc37-e0b1-46cd-b24a-6f8f3b248c1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.585043] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87437028-9f59-4f4a-9d5f-c8a22a8ac5b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.593026] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c15f0b-b250-441c-9073-cccb9be28fb1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.607027] env[68437]: DEBUG nova.compute.provider_tree [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.679651] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Successfully created port: eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1161.782592] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.845754] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1161.848319] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944971, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.110869] env[68437]: DEBUG nova.scheduler.client.report [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1162.125869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.126136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.126339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "6fabc758-0d56-4adb-a54e-b9c8798a0151-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.126939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.127148] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.129095] env[68437]: INFO nova.compute.manager [None 
req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Terminating instance [ 1162.343790] env[68437]: DEBUG oslo_vmware.api [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944971, 'name': PowerOnVM_Task, 'duration_secs': 0.851675} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.344112] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1162.344367] env[68437]: INFO nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Took 8.66 seconds to spawn the instance on the hypervisor. [ 1162.344552] env[68437]: DEBUG nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1162.345325] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd92360f-f7e1-4399-8cfe-8852158985ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.615714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.781s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.618142] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.836s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.618345] env[68437]: DEBUG nova.objects.instance [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68437) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1162.632395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "refresh_cache-6fabc758-0d56-4adb-a54e-b9c8798a0151" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.632643] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "refresh_cache-6fabc758-0d56-4adb-a54e-b9c8798a0151" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.632802] env[68437]: DEBUG nova.network.neutron [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1162.642355] env[68437]: INFO nova.scheduler.client.report [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted allocations for instance ea09a88a-d426-4af4-aa07-945ccfbf2a24 [ 1162.854062] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1162.866363] env[68437]: INFO nova.compute.manager [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Took 13.52 seconds to build instance. [ 1162.880021] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1162.880267] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1162.880505] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1162.880740] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1162.880924] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1162.881179] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1162.881446] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1162.881645] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1162.881850] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1162.882073] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1162.882293] env[68437]: DEBUG nova.virt.hardware [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1162.883611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f252de-cbba-461d-9b23-b4f336b590eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.892389] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e20b4de-e7fe-4c88-9302-7f84cae6dfbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.056436] env[68437]: DEBUG nova.compute.manager [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Received event network-vif-plugged-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1163.056669] env[68437]: DEBUG oslo_concurrency.lockutils [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] Acquiring lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.056886] env[68437]: DEBUG oslo_concurrency.lockutils [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.057066] env[68437]: DEBUG oslo_concurrency.lockutils [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.057237] env[68437]: DEBUG nova.compute.manager [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] No waiting events found dispatching network-vif-plugged-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1163.057398] env[68437]: WARNING nova.compute.manager [req-a099e4b5-1bf4-4011-8b31-dd3b7d525d80 req-b0012ae9-1998-4ca0-9e8f-6f1a7d3de6bc service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Received unexpected event network-vif-plugged-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 for instance with vm_state building and task_state spawning. [ 1163.151376] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Successfully updated port: eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1163.153680] env[68437]: DEBUG nova.network.neutron [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1163.155178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9d4c82d2-7a1c-4eec-b53b-e2bdf3403c3c tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ea09a88a-d426-4af4-aa07-945ccfbf2a24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.123s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.205949] env[68437]: DEBUG nova.network.neutron [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.368682] env[68437]: DEBUG oslo_concurrency.lockutils [None req-9c08118f-631f-49cd-9536-73fd045ddc96 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.027s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.627445] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d5c7d961-8478-4ff7-bf6b-fcbe458fb9e0 tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.656646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.656969] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.656969] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1163.684016] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01301fa3-97bf-45dd-a4ec-15bfcc886bd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.691390] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Suspending the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 
1163.691968] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-44df79ed-97d4-4c95-b3aa-72d2a57ef0f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.697466] env[68437]: DEBUG oslo_vmware.api [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1163.697466] env[68437]: value = "task-2944972" [ 1163.697466] env[68437]: _type = "Task" [ 1163.697466] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.705363] env[68437]: DEBUG oslo_vmware.api [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944972, 'name': SuspendVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.707892] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "refresh_cache-6fabc758-0d56-4adb-a54e-b9c8798a0151" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1163.708285] env[68437]: DEBUG nova.compute.manager [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1163.708474] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.709282] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c68f291-53b1-40f4-8cd6-ed5d6fcc5b8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.716068] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.716289] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f579ba02-7e4b-446a-8233-d38bd5d05b25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.721577] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1163.721577] env[68437]: value = "task-2944973" [ 1163.721577] env[68437]: _type = "Task" [ 1163.721577] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.730266] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.156732] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.157108] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.189393] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1164.209913] env[68437]: DEBUG oslo_vmware.api [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944972, 'name': SuspendVM_Task} progress is 70%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.230892] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944973, 'name': PowerOffVM_Task, 'duration_secs': 0.138525} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.233640] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1164.233977] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1164.234294] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faf0f509-3c58-4480-9176-4361681a3695 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.259984] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1164.260304] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1164.260515] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleting the datastore file [datastore2] 6fabc758-0d56-4adb-a54e-b9c8798a0151 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1164.260822] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a4f8eee-0985-4187-af47-c6d0f277573c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.269864] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1164.269864] env[68437]: value = "task-2944975" [ 1164.269864] env[68437]: _type = "Task" [ 1164.269864] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.278089] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944975, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.360511] env[68437]: DEBUG nova.network.neutron [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Updating instance_info_cache with network_info: [{"id": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "address": "fa:16:3e:3c:5a:f8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0283dc-e9", "ovs_interfaceid": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.659757] env[68437]: INFO nova.compute.manager [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Detaching volume eecd3a07-849b-4e77-9f5b-d19317a41bef [ 1164.692305] env[68437]: INFO nova.virt.block_device [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Attempting to driver detach volume eecd3a07-849b-4e77-9f5b-d19317a41bef from mountpoint /dev/sdb [ 1164.692758] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1164.692758] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591098', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'name': 'volume-eecd3a07-849b-4e77-9f5b-d19317a41bef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee0450b5-66ce-41ed-9f4f-7ffa7b46f769', 'attached_at': '', 'detached_at': '', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'serial': 'eecd3a07-849b-4e77-9f5b-d19317a41bef'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1164.693633] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d87e9d-8939-44f0-8180-4792b6d63fe9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.719049] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14249eb4-e0a9-486b-a7f1-11bcd20bbb48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.724374] env[68437]: DEBUG oslo_vmware.api [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944972, 'name': SuspendVM_Task, 'duration_secs': 0.594457} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.724949] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Suspended the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1164.725168] env[68437]: DEBUG nova.compute.manager [None req-0a60881b-28db-4006-ac37-03c772cb9627 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.725881] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444f0b7d-340a-45ad-b186-3d2d3df2f938 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.730306] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825f3437-6ba9-4c94-b273-b609e39cc454 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.753157] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f93593-1c6c-4f79-8286-7a84a9abeb96 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.767700] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] The volume has not been displaced from its original location: [datastore2] volume-eecd3a07-849b-4e77-9f5b-d19317a41bef/volume-eecd3a07-849b-4e77-9f5b-d19317a41bef.vmdk. No consolidation needed. {{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1164.772752] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1164.773665] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93010043-fa94-4574-b776-6abd42d76f21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.794140] env[68437]: DEBUG oslo_vmware.api [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092854} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.795230] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.795414] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.795589] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.795759] env[68437]: INFO nova.compute.manager [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1164.795988] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.796249] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1164.796249] env[68437]: value = "task-2944976" [ 1164.796249] env[68437]: _type = "Task" [ 1164.796249] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.796451] env[68437]: DEBUG nova.compute.manager [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1164.796510] env[68437]: DEBUG nova.network.neutron [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1164.805743] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944976, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.814147] env[68437]: DEBUG nova.network.neutron [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1164.863117] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.863516] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Instance network_info: |[{"id": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "address": "fa:16:3e:3c:5a:f8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0283dc-e9", "ovs_interfaceid": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1164.864032] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:5a:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1164.872177] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.872400] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1164.872637] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11b4db0e-7e3f-4ad5-962f-bebda3ca8f02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.892531] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1164.892531] env[68437]: value = "task-2944977" [ 1164.892531] env[68437]: _type = "Task" [ 1164.892531] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.900037] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944977, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.082839] env[68437]: DEBUG nova.compute.manager [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Received event network-changed-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1165.083111] env[68437]: DEBUG nova.compute.manager [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Refreshing instance network info cache due to event network-changed-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1165.083337] env[68437]: DEBUG oslo_concurrency.lockutils [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] Acquiring lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.083494] env[68437]: DEBUG oslo_concurrency.lockutils [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] Acquired lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.083656] env[68437]: DEBUG nova.network.neutron [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Refreshing network info cache for port eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1165.308621] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944976, 'name': ReconfigVM_Task, 'duration_secs': 0.229272} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.308994] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1165.313418] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-798325de-2ae2-4cbb-9390-31506300a871 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.323140] env[68437]: DEBUG nova.network.neutron [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.328982] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1165.328982] env[68437]: value = "task-2944978" [ 1165.328982] env[68437]: _type = "Task" [ 1165.328982] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.339246] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.401390] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944977, 'name': CreateVM_Task} progress is 25%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.799037] env[68437]: DEBUG nova.network.neutron [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Updated VIF entry in instance network info cache for port eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1165.799475] env[68437]: DEBUG nova.network.neutron [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Updating instance_info_cache with network_info: [{"id": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "address": "fa:16:3e:3c:5a:f8", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0283dc-e9", "ovs_interfaceid": "eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.825837] env[68437]: INFO nova.compute.manager [-] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Took 1.03 seconds to deallocate network for instance. [ 1165.839209] env[68437]: DEBUG oslo_vmware.api [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944978, 'name': ReconfigVM_Task, 'duration_secs': 0.15762} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.839510] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591098', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'name': 'volume-eecd3a07-849b-4e77-9f5b-d19317a41bef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ee0450b5-66ce-41ed-9f4f-7ffa7b46f769', 'attached_at': '', 'detached_at': '', 'volume_id': 'eecd3a07-849b-4e77-9f5b-d19317a41bef', 'serial': 'eecd3a07-849b-4e77-9f5b-d19317a41bef'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1165.904287] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944977, 'name': CreateVM_Task, 'duration_secs': 0.699385} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.904457] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1165.905155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.905327] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.905648] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1165.905899] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67896b80-a01b-400f-9e70-e213734dfd59 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.910834] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1165.910834] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52298e1a-3161-8d8a-4ca9-b70d7affa301" [ 1165.910834] env[68437]: _type = "Task" [ 1165.910834] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.918831] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52298e1a-3161-8d8a-4ca9-b70d7affa301, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.063625] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.064031] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.064350] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.064576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.064777] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.066899] env[68437]: INFO nova.compute.manager [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Terminating instance [ 1166.302254] env[68437]: DEBUG oslo_concurrency.lockutils [req-2a3cf1d0-5eb0-4af5-8174-b4c7fc13e2ed req-f6a5b2f6-f555-43b6-97c1-da761c2c0946 service nova] Releasing lock "refresh_cache-923133d9-3a2d-4309-83a7-ab59315ee4dc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.331690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.332029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 
tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.332257] env[68437]: DEBUG nova.objects.instance [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lazy-loading 'resources' on Instance uuid 6fabc758-0d56-4adb-a54e-b9c8798a0151 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.378423] env[68437]: DEBUG nova.objects.instance [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.421037] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52298e1a-3161-8d8a-4ca9-b70d7affa301, 'name': SearchDatastore_Task, 'duration_secs': 0.010434} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.421358] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.421584] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1166.421816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.421963] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.422166] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1166.422421] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54c8490b-09d1-41fc-a840-6b4f9d0e6504 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.430706] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1166.430839] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1166.431582] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c755c93-5b38-40f9-9147-6c0331a8b2d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.436388] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1166.436388] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c75e48-764e-d3f1-0e56-966932ec7c4d" [ 1166.436388] env[68437]: _type = "Task" [ 1166.436388] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.443973] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c75e48-764e-d3f1-0e56-966932ec7c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.570787] env[68437]: DEBUG nova.compute.manager [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1166.571124] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1166.572015] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e20f06-d759-4d14-9ce1-edd14f56c4a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.581500] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.581729] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21242e18-2927-4785-90b7-5cbd25699c58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.640725] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.640977] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.641179] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore2] 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.641439] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da6cd98d-0adc-416b-a6bc-0588b3cf7e5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.647842] env[68437]: DEBUG oslo_vmware.api [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1166.647842] env[68437]: value = "task-2944980" [ 1166.647842] env[68437]: _type = "Task" [ 1166.647842] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.656366] env[68437]: DEBUG oslo_vmware.api [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944980, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.947631] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c75e48-764e-d3f1-0e56-966932ec7c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010921} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.948439] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c25e6e-32e8-4504-b900-84a401c17a20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.955822] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1166.955822] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8b591-8be8-a863-f60a-c520d984ed1c" [ 1166.955822] env[68437]: _type = "Task" [ 1166.955822] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.963339] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8b591-8be8-a863-f60a-c520d984ed1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.014050] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf816f6-68bf-482f-b14e-2e8016c4e28b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.021049] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1640efa0-6549-4ba6-aa63-25334e4465ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.051481] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc50fc6-2239-45e6-8594-0c795bd49225 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.058505] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ce2683-8ab8-45a0-b2c1-bce8f23c9234 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.071572] env[68437]: DEBUG nova.compute.provider_tree [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.158774] env[68437]: DEBUG oslo_vmware.api [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2944980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14171} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.159038] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1167.159228] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1167.159398] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1167.159564] env[68437]: INFO nova.compute.manager [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1167.159804] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1167.159993] env[68437]: DEBUG nova.compute.manager [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1167.160101] env[68437]: DEBUG nova.network.neutron [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1167.389624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cdc62983-abcc-4d92-9a9e-d5627db9b323 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.232s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.400476] env[68437]: DEBUG nova.compute.manager [req-fadd7cce-71a9-456c-b144-9c036bb80d4b req-44fa0e4b-f879-4afe-b21f-292af2c8509f service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Received event network-vif-deleted-f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1167.401041] env[68437]: INFO nova.compute.manager [req-fadd7cce-71a9-456c-b144-9c036bb80d4b req-44fa0e4b-f879-4afe-b21f-292af2c8509f service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Neutron deleted interface f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf; detaching it from the instance and deleting it from the info cache [ 1167.401041] env[68437]: DEBUG nova.network.neutron [req-fadd7cce-71a9-456c-b144-9c036bb80d4b req-44fa0e4b-f879-4afe-b21f-292af2c8509f service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.466047] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8b591-8be8-a863-f60a-c520d984ed1c, 'name': SearchDatastore_Task, 'duration_secs': 0.01032} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.466320] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.466573] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 923133d9-3a2d-4309-83a7-ab59315ee4dc/923133d9-3a2d-4309-83a7-ab59315ee4dc.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1167.466823] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33af0533-7b23-4eee-b160-f9d6e89ef578 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.473061] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1167.473061] env[68437]: value = "task-2944981" [ 1167.473061] env[68437]: _type = "Task" [ 1167.473061] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.480781] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944981, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.575547] env[68437]: DEBUG nova.scheduler.client.report [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.881042] env[68437]: DEBUG nova.network.neutron [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.904262] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a92a7bc-0a37-41ce-96f3-948ec979a7b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.917022] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f738c6b-57d9-4010-b5d0-a9ece04960dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.948444] env[68437]: DEBUG nova.compute.manager [req-fadd7cce-71a9-456c-b144-9c036bb80d4b req-44fa0e4b-f879-4afe-b21f-292af2c8509f service nova] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Detach interface failed, port_id=f846ab07-2ca0-4e2d-bc6e-caac9f1a6ccf, reason: Instance 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1167.985012] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441749} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.985012] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 923133d9-3a2d-4309-83a7-ab59315ee4dc/923133d9-3a2d-4309-83a7-ab59315ee4dc.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1167.985012] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1167.985012] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dea2f50-7999-4a92-ab5a-18757e814aa6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.991509] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1167.991509] env[68437]: value = "task-2944982" [ 1167.991509] env[68437]: _type = "Task" [ 1167.991509] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.000504] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.042583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.043136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.043369] env[68437]: INFO nova.compute.manager [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Shelving [ 1168.081431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.749s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.109451] env[68437]: INFO nova.scheduler.client.report [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleted allocations for instance 6fabc758-0d56-4adb-a54e-b9c8798a0151 [ 1168.384605] env[68437]: INFO nova.compute.manager [-] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Took 1.22 seconds to deallocate network for instance. 
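The oslo.concurrency DEBUG lines above ("Acquiring lock …", "Lock … acquired … waited 0.000s", "… \"released\" … held …") are emitted by the lockutils helpers that Nova wraps its critical sections in. As a rough, illustrative sketch only (not Nova's actual code), the two usage patterns that produce these messages look like this; the function names are made up, the lock names are taken from the log:

```python
# Sketch of the oslo.concurrency locking patterns visible in this log.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage_sketch():
    """Runs only while the process-local 'compute_resources' lock is held."""
    # The synchronized() wrapper logs the "acquired by ... waited" /
    # "released ... held" DEBUG lines (lockutils.py:405/410/424) seen above.


def per_instance_lock_sketch(instance_uuid):
    # Context-manager form, as used for the per-instance and refresh_cache
    # locks above; it logs the Acquiring/Acquired/Releasing lines
    # (lockutils.py:313/316/334). The lock name built here is illustrative.
    with lockutils.lock('refresh_cache-' + instance_uuid):
        pass
```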
[ 1168.461341] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.461706] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.461817] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.462013] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.462191] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.464087] env[68437]: INFO nova.compute.manager [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Terminating instance [ 1168.500991] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060998} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.501255] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1168.501993] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed84f56-43dc-4912-9c1f-06155209b343 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.525517] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 923133d9-3a2d-4309-83a7-ab59315ee4dc/923133d9-3a2d-4309-83a7-ab59315ee4dc.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.525804] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44c1d338-6780-4dc9-bd36-a5bd204f1867 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.544981] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1168.544981] env[68437]: value = "task-2944983" [ 1168.544981] env[68437]: _type = "Task" [ 1168.544981] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.556382] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944983, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.617759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-16462a65-4455-4b8d-8e01-4abedd27a1ae tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "6fabc758-0d56-4adb-a54e-b9c8798a0151" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.491s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.890978] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.891309] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.891538] env[68437]: DEBUG nova.objects.instance [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'resources' on Instance uuid 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1168.967427] env[68437]: DEBUG nova.compute.manager [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.967642] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.968610] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88175cf4-8cb5-459e-bc93-c4d706973296 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.976140] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.976360] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c599e82-75b4-4b01-8592-b607a564d236 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.982075] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1168.982075] env[68437]: value = "task-2944984" [ 1168.982075] env[68437]: _type = "Task" [ 1168.982075] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.989679] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944984, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.055014] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.055386] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944983, 'name': ReconfigVM_Task, 'duration_secs': 0.283576} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.055596] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c73d0209-3a31-4916-a7b8-385a6f3186a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.057166] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 923133d9-3a2d-4309-83a7-ab59315ee4dc/923133d9-3a2d-4309-83a7-ab59315ee4dc.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1169.057747] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e71e6da-7697-4ad4-a17f-48f91e5286cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.064320] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1169.064320] env[68437]: value = "task-2944986" [ 1169.064320] env[68437]: _type = "Task" [ 1169.064320] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.065573] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1169.065573] env[68437]: value = "task-2944985" [ 1169.065573] env[68437]: _type = "Task" [ 1169.065573] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.075782] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944986, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.078685] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944985, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.491364] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944984, 'name': PowerOffVM_Task, 'duration_secs': 0.264107} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.493599] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.493774] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.494206] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ad1b56e-de07-4db4-b922-a92a1ec55cfa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.544893] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda7059a-0788-46b3-b7a7-69f21a65a3b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.552800] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cd4bb0-3224-43c2-b346-45a5ee394bd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.556907] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.557134] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.557319] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleting the datastore file [datastore2] ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.557882] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e78c268c-d138-4cb5-8bcd-492426082406 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.590940] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f11a5e2-25f6-4b7e-a40e-a0abff92e158 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.593173] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 
1169.593173] env[68437]: value = "task-2944988" [ 1169.593173] env[68437]: _type = "Task" [ 1169.593173] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.603301] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944985, 'name': PowerOffVM_Task, 'duration_secs': 0.224938} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.603508] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944986, 'name': Rename_Task, 'duration_secs': 0.143043} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.605046] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7b6bbc-206f-425f-a414-155c7c2f1013 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.608556] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.608870] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1169.612358] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d220a76b-2321-49d1-9b2a-ffa9b031cb99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.615280] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7753bf18-d6e6-46ef-bc66-94eca3166624 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.616938] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944988, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.629582] env[68437]: DEBUG nova.compute.provider_tree [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.644759] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1169.644759] env[68437]: value = "task-2944989" [ 1169.644759] env[68437]: _type = "Task" [ 1169.644759] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.646377] env[68437]: DEBUG nova.scheduler.client.report [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.649889] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef9a8d0-8361-47b7-89e0-b6ffef74fc88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.666582] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944989, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.742768] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "b4257b79-2723-43fd-b64f-74104802e048" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.743130] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.743376] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "b4257b79-2723-43fd-b64f-74104802e048-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.743570] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.743782] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.746883] env[68437]: INFO nova.compute.manager [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Terminating instance [ 1170.102951] env[68437]: DEBUG oslo_vmware.api [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2944988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179168} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.103300] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.103499] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1170.103678] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1170.103853] env[68437]: INFO nova.compute.manager [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1170.104170] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1170.104378] env[68437]: DEBUG nova.compute.manager [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1170.104468] env[68437]: DEBUG nova.network.neutron [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1170.153646] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.262s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.162690] env[68437]: DEBUG oslo_vmware.api [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944989, 'name': PowerOnVM_Task, 'duration_secs': 0.466173} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.163009] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1170.163264] env[68437]: INFO nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1170.163449] env[68437]: DEBUG nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1170.164421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262528f8-d916-420f-9a38-8872617fb7f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.167752] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1170.168011] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1f0e1b16-6a61-4e6c-be38-f31e9aef8bdc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.172096] env[68437]: INFO nova.scheduler.client.report [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f [ 1170.181132] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1170.181132] env[68437]: value = "task-2944990" [ 1170.181132] env[68437]: _type = "Task" [ 1170.181132] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.190278] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944990, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.250439] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "refresh_cache-b4257b79-2723-43fd-b64f-74104802e048" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.250576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquired lock "refresh_cache-b4257b79-2723-43fd-b64f-74104802e048" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.250764] env[68437]: DEBUG nova.network.neutron [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1170.592301] env[68437]: DEBUG nova.compute.manager [req-99e3c356-abd2-4a87-8261-6e957aae16a9 req-753faea1-1ba2-4ec8-a4f9-1683a045461a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Received event network-vif-deleted-e35bd2c3-ec8e-4c14-90f7-e714e59882b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1170.592576] env[68437]: INFO nova.compute.manager [req-99e3c356-abd2-4a87-8261-6e957aae16a9 req-753faea1-1ba2-4ec8-a4f9-1683a045461a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Neutron deleted interface e35bd2c3-ec8e-4c14-90f7-e714e59882b2; detaching it from the instance and deleting it from the info cache [ 1170.592784] env[68437]: DEBUG nova.network.neutron [req-99e3c356-abd2-4a87-8261-6e957aae16a9 req-753faea1-1ba2-4ec8-a4f9-1683a045461a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.692058] env[68437]: INFO nova.compute.manager [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Took 12.20 seconds to build instance. [ 1170.692662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-48dde6ee-74fc-4efa-95bc-3c6ef1676425 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "96c7aa3f-9098-49fe-8f5f-c3c45110fb4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.629s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.699144] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944990, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.771191] env[68437]: DEBUG nova.network.neutron [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1170.819373] env[68437]: DEBUG nova.network.neutron [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.075298] env[68437]: DEBUG nova.network.neutron [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.095529] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1e6a88c-3d57-4ead-8933-4ee990880fcc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.104538] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f975dcb-73bf-43d8-9b76-1e46c3e6658b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.138048] env[68437]: DEBUG nova.compute.manager [req-99e3c356-abd2-4a87-8261-6e957aae16a9 req-753faea1-1ba2-4ec8-a4f9-1683a045461a service nova] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Detach interface failed, port_id=e35bd2c3-ec8e-4c14-90f7-e714e59882b2, reason: Instance ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1171.195591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-17b9c392-565b-45eb-be9f-59c03193ad31 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.711s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.196061] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944990, 'name': CreateSnapshot_Task, 'duration_secs': 0.614161} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.196309] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1171.197052] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051c23a6-2792-4d71-9e96-a260e8dd715e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.322921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Releasing lock "refresh_cache-b4257b79-2723-43fd-b64f-74104802e048" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.323502] env[68437]: DEBUG nova.compute.manager [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1171.323710] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.324700] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf14516b-a4d8-4686-b828-2332691711b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.332531] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.332531] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92ef5629-1574-4c62-b58f-2e014000b865 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.338980] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1171.338980] env[68437]: value = "task-2944991" [ 1171.338980] env[68437]: _type = "Task" [ 1171.338980] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.347792] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944991, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.578438] env[68437]: INFO nova.compute.manager [-] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Took 1.47 seconds to deallocate network for instance. [ 1171.714674] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1171.714989] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-57eb7e99-6925-4fd7-bc4d-3ffc2c5869a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.723171] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1171.723171] env[68437]: value = "task-2944992" [ 1171.723171] env[68437]: _type = "Task" [ 1171.723171] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.731082] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944992, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.756125] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.756363] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.756564] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.756749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1171.756921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.758913] env[68437]: INFO nova.compute.manager [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Terminating instance [ 1171.848891] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944991, 'name': PowerOffVM_Task, 'duration_secs': 0.149251} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.849236] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.849426] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.849679] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c97ad10b-89bd-44e0-8ddf-6a623a5e5e6d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.875706] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.875956] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.876171] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleting the datastore file [datastore1] b4257b79-2723-43fd-b64f-74104802e048 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.876445] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f0ad59f-c06a-4462-b023-9f4f6ffc46fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.882819] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 
tempest-ServerShowV247Test-253174930-project-member] Waiting for the task: (returnval){ [ 1171.882819] env[68437]: value = "task-2944994" [ 1171.882819] env[68437]: _type = "Task" [ 1171.882819] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.890536] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944994, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.086114] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.086448] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.086692] env[68437]: DEBUG nova.objects.instance [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'resources' on Instance uuid ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.189115] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.189369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.233577] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944992, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.262690] env[68437]: DEBUG nova.compute.manager [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1172.262919] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1172.263785] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8594aec5-c103-4eb4-b755-ab2f564f2f10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.273009] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.273246] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4354bf53-c187-456e-b205-3c9ff97745b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.281166] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1172.281166] env[68437]: value = "task-2944995" [ 1172.281166] env[68437]: _type = "Task" [ 1172.281166] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.288938] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944995, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.392767] env[68437]: DEBUG oslo_vmware.api [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Task: {'id': task-2944994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107738} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.393057] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.393257] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.393455] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.393647] env[68437]: INFO nova.compute.manager [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] [instance: b4257b79-2723-43fd-b64f-74104802e048] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1172.393905] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1172.394149] env[68437]: DEBUG nova.compute.manager [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1172.394248] env[68437]: DEBUG nova.network.neutron [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1172.416724] env[68437]: DEBUG nova.network.neutron [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1172.692146] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1172.735878] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944992, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.756268] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bb92d2-e91e-4a6e-bddf-c0733501217b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.763197] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662111c2-2c09-4849-9d08-a23599283467 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.794362] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed54876-321a-4400-9082-0a2d3840b9cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.803656] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e23f53-4b4c-4456-b41d-01acc10ea2fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.807320] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944995, 'name': PowerOffVM_Task, 'duration_secs': 0.210557} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.807573] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1172.807742] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1172.808371] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-875f7274-4036-453e-841b-22b587f6a933 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.819578] env[68437]: DEBUG nova.compute.provider_tree [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.888933] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1172.889184] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Deleting contents of the VM from datastore datastore1 
{{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1172.889312] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore1] 923133d9-3a2d-4309-83a7-ab59315ee4dc {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1172.889588] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efe2f50c-7764-4ae4-bb8a-f9ab2fc88025 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.896220] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1172.896220] env[68437]: value = "task-2944997" [ 1172.896220] env[68437]: _type = "Task" [ 1172.896220] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.904671] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.919428] env[68437]: DEBUG nova.network.neutron [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.213774] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.235938] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2944992, 'name': CloneVM_Task, 'duration_secs': 1.014185} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.236219] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Created linked-clone VM from snapshot [ 1173.236939] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6944b39a-9fb6-47aa-9276-adfbaa9da319 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.243623] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Uploading image fa463cf5-5b17-4cb0-9385-6d7d061c3876 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1173.267219] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1173.267219] env[68437]: value = "vm-591135" [ 1173.267219] env[68437]: _type = "VirtualMachine" [ 1173.267219] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1173.267466] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-eb937867-98c4-4e70-9f39-2ac167ed3b76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.274445] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease: (returnval){ [ 1173.274445] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d01fe-0141-64e4-2d4f-47cdb214b9cd" [ 1173.274445] env[68437]: _type = "HttpNfcLease" [ 1173.274445] env[68437]: } obtained for exporting VM: (result){ [ 1173.274445] env[68437]: value = "vm-591135" [ 1173.274445] env[68437]: _type = "VirtualMachine" [ 1173.274445] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1173.274670] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the lease: (returnval){ [ 1173.274670] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d01fe-0141-64e4-2d4f-47cdb214b9cd" [ 1173.274670] env[68437]: _type = "HttpNfcLease" [ 1173.274670] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1173.280484] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1173.280484] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d01fe-0141-64e4-2d4f-47cdb214b9cd" [ 1173.280484] env[68437]: _type = "HttpNfcLease" [ 1173.280484] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1173.323503] env[68437]: DEBUG nova.scheduler.client.report [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1173.405875] env[68437]: DEBUG oslo_vmware.api [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2944997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189083} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.406152] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1173.406342] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1173.406515] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1173.406683] env[68437]: INFO nova.compute.manager [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1173.406913] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.407118] env[68437]: DEBUG nova.compute.manager [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1173.407213] env[68437]: DEBUG nova.network.neutron [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1173.421324] env[68437]: INFO nova.compute.manager [-] [instance: b4257b79-2723-43fd-b64f-74104802e048] Took 1.03 seconds to deallocate network for instance. [ 1173.710221] env[68437]: DEBUG nova.compute.manager [req-8799e630-5a90-4be6-b6a0-dd033a0b4bda req-e2c39214-b234-460e-a95f-f4e94caf6463 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Received event network-vif-deleted-eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1173.710399] env[68437]: INFO nova.compute.manager [req-8799e630-5a90-4be6-b6a0-dd033a0b4bda req-e2c39214-b234-460e-a95f-f4e94caf6463 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Neutron deleted interface eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50; detaching it from the instance and deleting it from the info cache [ 1173.711330] env[68437]: DEBUG nova.network.neutron [req-8799e630-5a90-4be6-b6a0-dd033a0b4bda req-e2c39214-b234-460e-a95f-f4e94caf6463 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.783016] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1173.783016] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d01fe-0141-64e4-2d4f-47cdb214b9cd" [ 1173.783016] env[68437]: _type = "HttpNfcLease" [ 1173.783016] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1173.783382] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1173.783382] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523d01fe-0141-64e4-2d4f-47cdb214b9cd" [ 1173.783382] env[68437]: _type = "HttpNfcLease" [ 1173.783382] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1173.784074] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb2f406-8626-43bc-a752-14732b7af078 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.791684] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk from lease info. 
{{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1173.791684] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1173.847979] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.851503] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.638s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.853086] env[68437]: INFO nova.compute.claims [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1173.879312] env[68437]: INFO nova.scheduler.client.report [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted allocations for instance ee0450b5-66ce-41ed-9f4f-7ffa7b46f769 [ 1173.891083] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fdaeaf75-9408-48b8-b5a1-435c267bf39e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.927580] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.192961] env[68437]: DEBUG nova.network.neutron [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.213872] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-649b27c2-678f-4631-ac2c-39f29a9d8dc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.222655] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248a2b10-c3fc-44d2-aeac-ad6412d42a27 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.256164] env[68437]: DEBUG nova.compute.manager 
[req-8799e630-5a90-4be6-b6a0-dd033a0b4bda req-e2c39214-b234-460e-a95f-f4e94caf6463 service nova] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Detach interface failed, port_id=eb0283dc-e9e2-4b9c-bf10-d3b353a1cd50, reason: Instance 923133d9-3a2d-4309-83a7-ab59315ee4dc could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1174.386810] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6d295539-80e5-44f2-97cc-472f006b8734 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "ee0450b5-66ce-41ed-9f4f-7ffa7b46f769" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.925s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.695731] env[68437]: INFO nova.compute.manager [-] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Took 1.29 seconds to deallocate network for instance. [ 1175.031295] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292c0514-66b8-4227-9c08-39998d9b6b35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.039499] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98544a17-7684-4ba1-b5f7-76251d5b92b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.071088] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e37d0aa0-5138-492f-b4a0-64ff06541aa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.078282] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f6b5cd-c9c7-444a-ad72-6aa478b1d5ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.091838] env[68437]: DEBUG nova.compute.provider_tree [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.112397] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.112612] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.112765] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.112911] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task 
ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.113162] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.113310] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.113453] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.113584] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1175.113722] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.203421] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.595051] env[68437]: DEBUG nova.scheduler.client.report [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.617193] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.100774] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.101328] env[68437]: DEBUG 
nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1176.104203] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.177s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.104460] env[68437]: DEBUG nova.objects.instance [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lazy-loading 'resources' on Instance uuid b4257b79-2723-43fd-b64f-74104802e048 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1176.608166] env[68437]: DEBUG nova.compute.utils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1176.613622] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1176.613834] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1176.659257] env[68437]: DEBUG nova.policy [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ff3c9a96f10413f860946488fa85aee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38ad479949b24307b08e16fdb821c76f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1176.666308] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.666593] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 
tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.797054] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73722e53-05e5-40a9-9cf6-b98d2a3225da {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.807343] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51cea84-8569-4fbd-bee7-c47ec49ad88d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.839819] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f2caf5-8e52-4804-b72c-75f9ca8453f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.847996] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e628e40-3a7c-4ddc-b494-3de20a446d57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.862742] env[68437]: DEBUG nova.compute.provider_tree [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.066548] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Successfully created port: 4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1177.114308] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1177.171397] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1177.365981] env[68437]: DEBUG nova.scheduler.client.report [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1177.694020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.871387] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.873773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.671s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.874078] env[68437]: DEBUG nova.objects.instance [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid 923133d9-3a2d-4309-83a7-ab59315ee4dc {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.894784] env[68437]: INFO nova.scheduler.client.report [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Deleted allocations for instance b4257b79-2723-43fd-b64f-74104802e048 [ 1178.125131] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1178.151854] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.152151] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.152321] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.152504] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.152652] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.152801] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.153049] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.153228] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.153403] env[68437]: DEBUG nova.virt.hardware [None 
req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.153566] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.153741] env[68437]: DEBUG nova.virt.hardware [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.154677] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786296d3-bd0b-4399-8637-5472f60270b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.162778] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3468ae19-b717-444d-be88-d82aa5fb3622 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.403179] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7556a78-1072-4854-b373-a426f02ca3ac tempest-ServerShowV247Test-253174930 tempest-ServerShowV247Test-253174930-project-member] Lock "b4257b79-2723-43fd-b64f-74104802e048" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.660s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.510060] env[68437]: DEBUG nova.compute.manager [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Received event network-vif-plugged-4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1178.510165] env[68437]: DEBUG oslo_concurrency.lockutils [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.510380] env[68437]: DEBUG oslo_concurrency.lockutils [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.510574] env[68437]: DEBUG oslo_concurrency.lockutils [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1178.510742] env[68437]: DEBUG nova.compute.manager [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] No waiting events found dispatching network-vif-plugged-4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1178.510926] env[68437]: WARNING nova.compute.manager [req-98b220f2-fe37-49ff-b4c0-d37ff5f8f6e1 req-c3e02d4d-7e58-4b49-8841-8bdfd92e7e12 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Received unexpected event network-vif-plugged-4f16f068-3b53-4a8d-a82b-21114eb371ea for instance with vm_state building and task_state spawning. [ 1178.547139] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0a7229-2bb7-47cb-8778-fd69d8c533e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.556220] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d789b1a-a904-4713-8ab6-9bf8b4c41c64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.588636] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a64c85-ccbf-4cdb-9f4c-ad27c95214c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.596147] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04781c2b-0658-4e0d-ae86-28ebf6033e39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.601211] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Successfully updated port: 4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1178.611087] env[68437]: DEBUG nova.compute.provider_tree [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.113353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.113522] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.113680] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 
00b76648-d27b-4002-80cb-366e64c32ecc] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1179.115464] env[68437]: DEBUG nova.scheduler.client.report [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.622346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.748s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.625265] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.008s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.625265] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.625265] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1179.625265] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.931s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.627024] env[68437]: INFO nova.compute.claims [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.630680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5423bc4a-8c2e-4665-9976-055cda4b0bac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.640718] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f182317b-b7ff-4aff-8628-072ae237267c {{(pid=68437) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.655307] env[68437]: INFO nova.scheduler.client.report [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance 923133d9-3a2d-4309-83a7-ab59315ee4dc [ 1179.661289] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545ada63-71be-41fa-b692-4383869c130c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.664091] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1179.670466] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95317f99-183f-4092-9b6b-211b95afca16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.700571] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179418MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1179.700740] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.843805] env[68437]: DEBUG nova.network.neutron [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 1180.162737] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4de607cd-b3a5-4583-8e11-761a9bb17d83 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "923133d9-3a2d-4309-83a7-ab59315ee4dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.406s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.353019] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.353019] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Instance network_info: |[{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1180.353019] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:d4:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89470f7f-1c8b-4c83-92b5-6f73a77c520f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f16f068-3b53-4a8d-a82b-21114eb371ea', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1180.360897] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.361162] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1180.361874] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b238b92f-94ad-4f13-bcdf-bc9be7a40f06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.382884] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1180.382884] env[68437]: value = "task-2944999" [ 1180.382884] env[68437]: _type = "Task" [ 1180.382884] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.391063] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944999, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.539726] env[68437]: DEBUG nova.compute.manager [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Received event network-changed-4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1180.539917] env[68437]: DEBUG nova.compute.manager [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Refreshing instance network info cache due to event network-changed-4f16f068-3b53-4a8d-a82b-21114eb371ea. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1180.540154] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] Acquiring lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.540305] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] Acquired lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.540468] env[68437]: DEBUG nova.network.neutron [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Refreshing network info cache for port 4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1180.712586] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1180.713689] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f9ac88-63ef-492b-bb6f-a35cf52631c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.721887] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1180.722069] env[68437]: ERROR oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk due to incomplete transfer. [ 1180.722286] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e3fc0a2a-32f9-463d-99d4-5948759267e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.729417] env[68437]: DEBUG oslo_vmware.rw_handles [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5245e02c-1a3d-ec6e-f4a4-4dc3e9ee0e2a/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1180.729619] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Uploaded image fa463cf5-5b17-4cb0-9385-6d7d061c3876 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1180.731972] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1180.732219] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5aacb882-7b8a-4c72-996c-174056fafa23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.739752] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1180.739752] env[68437]: value = "task-2945000" [ 1180.739752] env[68437]: _type = "Task" [ 1180.739752] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.747484] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945000, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.790250] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd83828-9e56-4e13-8ba0-e1ba865eb98d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.797289] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2e0db7-3e6a-447d-b17a-74228aaf2c05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.828383] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e411652-83c8-4292-8522-6deef1337f77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.834983] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63a7fd3-4bc5-498c-97d9-fb9b86323c3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.847544] env[68437]: DEBUG nova.compute.provider_tree [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.892257] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944999, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.939895] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "7a05d783-afac-43a1-a715-c83b42c990c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.940156] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "7a05d783-afac-43a1-a715-c83b42c990c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.250534] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945000, 'name': Destroy_Task, 'duration_secs': 0.484493} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.250840] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Destroyed the VM [ 1181.251137] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1181.251382] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-83a11cb8-a1d1-4733-8978-ab81a5448c39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.260061] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1181.260061] env[68437]: value = "task-2945001" [ 1181.260061] env[68437]: _type = "Task" [ 1181.260061] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.268396] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945001, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.287715] env[68437]: DEBUG nova.network.neutron [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updated VIF entry in instance network info cache for port 4f16f068-3b53-4a8d-a82b-21114eb371ea. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1181.288081] env[68437]: DEBUG nova.network.neutron [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.350813] env[68437]: DEBUG nova.scheduler.client.report [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.393788] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2944999, 'name': CreateVM_Task, 'duration_secs': 0.546825} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.393971] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1181.394661] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.394868] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.395213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1181.395474] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e00807a-8a87-4772-8651-f295b446938a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.399746] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1181.399746] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52b359e2-eca4-3979-4aba-e9c8eff3d20a" [ 1181.399746] env[68437]: _type = "Task" [ 1181.399746] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.407189] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b359e2-eca4-3979-4aba-e9c8eff3d20a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.442431] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1181.769739] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945001, 'name': RemoveSnapshot_Task, 'duration_secs': 0.369759} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.770230] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1181.770622] env[68437]: DEBUG nova.compute.manager [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.771448] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893d6072-88e1-4c2d-b71b-b553ed1d5445 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.773863] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "4abf1477-2f0e-4a13-884a-c19420b3e435" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.774127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.774332] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.774516] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.774680] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.776806] env[68437]: INFO nova.compute.manager [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Terminating instance [ 1181.790985] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2f7f9ea-f381-48b7-90d8-4c21d6bbbf20 req-dac08ac7-ba53-4980-8a30-37856b0fbcf8 service nova] Releasing lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.855775] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.856311] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.858820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.158s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.910365] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52b359e2-eca4-3979-4aba-e9c8eff3d20a, 'name': SearchDatastore_Task, 'duration_secs': 0.010918} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.910660] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.910895] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1181.911176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.911333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.911515] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1181.911764] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8497e4ed-dedb-4ea4-b4d7-ac337ae2fae0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.919752] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1181.919924] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1181.920598] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd3236ee-5a50-4cf0-9c41-9e6c89aa9569 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.925424] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1181.925424] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52204bf1-8117-4fdd-c8c3-9ab934810542" [ 1181.925424] env[68437]: _type = "Task" [ 1181.925424] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.932589] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52204bf1-8117-4fdd-c8c3-9ab934810542, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.963147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.284922] env[68437]: DEBUG nova.compute.manager [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1182.285164] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1182.285635] env[68437]: INFO nova.compute.manager [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Shelve offloading [ 1182.287388] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db1dc47-0fff-4d4c-88a0-bdd695b10056 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.297355] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.297564] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-afff198f-9b79-4bcc-bed4-b106fc4696a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.306230] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1182.306230] env[68437]: value = "task-2945002" [ 1182.306230] env[68437]: _type = "Task" [ 1182.306230] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.313796] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.362076] env[68437]: DEBUG nova.compute.utils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.368174] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.368343] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1182.431312] env[68437]: DEBUG nova.policy [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6801cab23bf4aadb8d7f326f0643c32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73e8290afeb84bf3976cfa22d3452ca7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.436972] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52204bf1-8117-4fdd-c8c3-9ab934810542, 'name': SearchDatastore_Task, 'duration_secs': 0.008166} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.437804] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-291dbe26-3320-4133-9600-6dea5ed9a914 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.443978] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1182.443978] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac9244-e3c8-20b9-06ba-d1b8f99d3fab" [ 1182.443978] env[68437]: _type = "Task" [ 1182.443978] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.452163] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac9244-e3c8-20b9-06ba-d1b8f99d3fab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.794028] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.797017] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27c92f61-81ad-4d48-84c2-53d8281abecf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.800354] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Successfully created port: 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.803870] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1182.803870] env[68437]: value = "task-2945003" [ 1182.803870] env[68437]: _type = "Task" [ 1182.803870] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.818869] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1182.819273] env[68437]: DEBUG nova.compute.manager [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1182.819670] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945002, 'name': PowerOffVM_Task, 'duration_secs': 0.198364} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.820463] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b809d743-e95d-447e-89a3-454e47dfdd1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.823060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1182.823371] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1182.823695] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f878a1a-d7dc-498e-a58f-62a76a2ba16a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.827967] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.828272] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.828550] env[68437]: DEBUG nova.network.neutron [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1182.869157] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.891466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1182.891466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1182.891466] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleting the datastore file [datastore1] 4abf1477-2f0e-4a13-884a-c19420b3e435 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1182.892249] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54798b3c-b37d-4b44-89c8-8d375a0f8412 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.896815] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897078] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897078] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e2143e07-8c8d-4008-bb73-29aae91baee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897206] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 76d97a56-21a2-4363-a987-ef872f056510 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897311] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 4abf1477-2f0e-4a13-884a-c19420b3e435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897424] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 191b441c-2c9f-48f9-b83a-d539722e6375 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897536] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance aff861ed-e792-480a-811e-c157c0606d08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897647] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e81e633d-34a6-443d-a2fe-95e6d8afa552 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897756] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.897951] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 00b76648-d27b-4002-80cb-366e64c32ecc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.898018] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8554a78c-c2d7-459d-a295-121da777dfd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1182.900260] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1182.900260] env[68437]: value = "task-2945005" [ 1182.900260] env[68437]: _type = "Task" [ 1182.900260] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.909120] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945005, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.954632] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac9244-e3c8-20b9-06ba-d1b8f99d3fab, 'name': SearchDatastore_Task, 'duration_secs': 0.010186} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.954920] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.955209] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1182.955490] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf15f80d-a2af-4c47-9b0d-881c5cd6accb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.961754] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1182.961754] env[68437]: value = "task-2945006" [ 1182.961754] env[68437]: _type = "Task" [ 1182.961754] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.969120] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.404506] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 7a05d783-afac-43a1-a715-c83b42c990c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1183.404734] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1183.404846] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1183.416800] env[68437]: DEBUG oslo_vmware.api [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173958} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.417824] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1183.418069] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1183.418262] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1183.418440] env[68437]: INFO nova.compute.manager [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1183.418697] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1183.419261] env[68437]: DEBUG nova.compute.manager [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1183.419343] env[68437]: DEBUG nova.network.neutron [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1183.475343] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497821} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.475703] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1183.475919] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1183.476185] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dceb0ac8-ace7-4ac2-a6d7-7e550281cf7d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.482395] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1183.482395] env[68437]: value = "task-2945007" [ 1183.482395] env[68437]: _type = "Task" [ 1183.482395] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.494079] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945007, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.606031] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f756e2-28e0-4cbd-9ef2-58c295bcf477 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.614210] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab269a3-a9e5-4482-bf12-7bec4fce3c45 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.649511] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af240151-c44d-4096-b749-d4937139a956 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.657341] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a11d5f-eae9-4354-ad03-0d5592be0d6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.671028] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.718289] env[68437]: DEBUG nova.network.neutron [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.882282] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.908778] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.909043] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.909210] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.909394] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.909540] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.909685] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.909896] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.910065] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.910237] 
env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.910399] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.910571] env[68437]: DEBUG nova.virt.hardware [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.911475] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7022d4d-4aad-4d48-80cb-88c0ad3c30fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.919642] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fa5e65-0fdb-4ebc-a5fd-21927fd0c4cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.970701] env[68437]: DEBUG nova.compute.manager [req-4bc5759d-ffb8-4116-b842-4bd126b03fc2 req-88bcb19c-9ea9-4e23-a76a-f7e612ff2fb8 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Received event network-vif-deleted-c58ce980-01f0-476a-b297-adac9a7fcdef {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1183.970971] env[68437]: INFO nova.compute.manager [req-4bc5759d-ffb8-4116-b842-4bd126b03fc2 req-88bcb19c-9ea9-4e23-a76a-f7e612ff2fb8 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Neutron deleted interface c58ce980-01f0-476a-b297-adac9a7fcdef; detaching it from the instance and deleting it from the info cache [ 1183.971100] env[68437]: DEBUG nova.network.neutron [req-4bc5759d-ffb8-4116-b842-4bd126b03fc2 req-88bcb19c-9ea9-4e23-a76a-f7e612ff2fb8 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.993396] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065578} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.993662] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1183.994410] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6849af1-b6df-4762-a00a-8a4b2a85f80d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.015888] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.016370] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da247635-dc6b-4e6f-9a43-983d27bf20bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.036313] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1184.036313] env[68437]: value = "task-2945008" [ 1184.036313] env[68437]: _type = "Task" [ 1184.036313] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.044012] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945008, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.174602] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.191270] env[68437]: DEBUG nova.compute.manager [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Received event network-vif-plugged-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1184.191528] env[68437]: DEBUG oslo_concurrency.lockutils [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.191687] env[68437]: DEBUG oslo_concurrency.lockutils [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] Lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.191848] env[68437]: DEBUG oslo_concurrency.lockutils [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] Lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.192017] env[68437]: DEBUG nova.compute.manager [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] No waiting events found dispatching network-vif-plugged-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1184.192189] env[68437]: WARNING nova.compute.manager [req-d6903935-e993-42c8-8cf6-84c1bebf4ab6 req-4295d8dc-5a1c-4507-9779-7eb400ea874b service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Received unexpected event network-vif-plugged-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 for instance with vm_state building and task_state spawning. 
[ 1184.221299] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.296474] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Successfully updated port: 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.454975] env[68437]: DEBUG nova.network.neutron [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.473351] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-098aeeaf-71b9-4ec9-9e92-0f14244a0ec4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.490331] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723b75d8-e7f4-40d6-a4d4-22355b558320 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.534973] env[68437]: DEBUG nova.compute.manager [req-4bc5759d-ffb8-4116-b842-4bd126b03fc2 req-88bcb19c-9ea9-4e23-a76a-f7e612ff2fb8 service nova] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Detach interface failed, port_id=c58ce980-01f0-476a-b297-adac9a7fcdef, reason: Instance 4abf1477-2f0e-4a13-884a-c19420b3e435 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1184.545743] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.548044] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1184.548311] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6a1416-0e65-47b9-9dd2-4f3085e33696 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.555433] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1184.555433] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0bdf327-f114-4e0a-a89d-c0b892691258 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.626212] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1184.626457] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1184.626646] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore2] aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.626918] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-688f5ece-704e-428b-995e-bb93f0a01f91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.633802] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1184.633802] env[68437]: value = "task-2945010" [ 1184.633802] env[68437]: _type = "Task" [ 1184.633802] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.642089] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.679709] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1184.679936] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.821s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.680262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.717s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.682133] env[68437]: INFO nova.compute.claims [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1184.798708] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.798984] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.799400] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1184.958167] env[68437]: INFO nova.compute.manager [-] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Took 1.54 seconds to deallocate network for instance. [ 1185.047174] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945008, 'name': ReconfigVM_Task, 'duration_secs': 0.628595} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.047452] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.048082] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75a61099-694b-4af8-8ec5-a1f452e52e05 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.053570] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1185.053570] env[68437]: value = "task-2945011" [ 1185.053570] env[68437]: _type = "Task" [ 1185.053570] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.061033] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945011, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.143676] env[68437]: DEBUG oslo_vmware.api [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150288} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.144044] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1185.144298] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1185.144512] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1185.166523] env[68437]: INFO nova.scheduler.client.report [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted allocations for instance aff861ed-e792-480a-811e-c157c0606d08 [ 1185.220718] env[68437]: INFO nova.compute.manager [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Rebuilding instance [ 1185.261084] env[68437]: DEBUG nova.compute.manager [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.261962] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f44e35c-beaf-4916-99b7-df784bb6887f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.332761] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1185.455504] env[68437]: DEBUG nova.network.neutron [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updating instance_info_cache with network_info: [{"id": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "address": "fa:16:3e:9a:0b:e3", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9039c1ee-0c", "ovs_interfaceid": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.465963] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.564363] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945011, 'name': Rename_Task, 'duration_secs': 0.137506} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.564740] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1185.564994] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6757ed0-06f2-4a13-873c-42952a006884 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.572475] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1185.572475] env[68437]: value = "task-2945012" [ 1185.572475] env[68437]: _type = "Task" [ 1185.572475] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.582690] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945012, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.671938] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.832423] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0d737e-bfb0-4839-8794-ff6c0c161731 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.840646] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b0a58f-f38d-4444-848c-af6ee6e8e79e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.875078] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376d5461-23b8-405a-9814-3f8da8ec02c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.882981] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bd72c4-1170-4ef0-bb7d-34653ee6dda0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.899461] env[68437]: DEBUG nova.compute.provider_tree [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.959446] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.959866] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Instance network_info: |[{"id": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "address": "fa:16:3e:9a:0b:e3", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9039c1ee-0c", "ovs_interfaceid": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.961699] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:0b:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.968628] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.968871] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.969167] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86387cc9-3fc6-4f4c-b0a4-98fb756ceeaa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.990866] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.990866] env[68437]: value = "task-2945013" [ 1185.990866] env[68437]: _type = "Task" [ 1185.990866] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.998124] env[68437]: DEBUG nova.compute.manager [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-vif-unplugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1185.998345] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.998548] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.998710] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.998877] env[68437]: DEBUG nova.compute.manager [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] No waiting events found dispatching network-vif-unplugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1185.999098] env[68437]: WARNING nova.compute.manager [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received unexpected event network-vif-unplugged-5f058ce1-be0f-4b97-be84-11302a668781 for instance with vm_state shelved_offloaded and task_state unshelving. [ 1185.999554] env[68437]: DEBUG nova.compute.manager [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-changed-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1185.999554] env[68437]: DEBUG nova.compute.manager [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing instance network info cache due to event network-changed-5f058ce1-be0f-4b97-be84-11302a668781. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1185.999661] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.999725] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.999871] env[68437]: DEBUG nova.network.neutron [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1186.004614] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945013, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.084637] env[68437]: DEBUG oslo_vmware.api [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945012, 'name': PowerOnVM_Task, 'duration_secs': 0.472407} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.084999] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.085296] env[68437]: INFO nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Took 7.96 seconds to spawn the instance on the hypervisor. 
[ 1186.085946] env[68437]: DEBUG nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1186.087137] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727ec791-cb2c-4539-a41b-d171c71f6f50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.217874] env[68437]: DEBUG nova.compute.manager [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Received event network-changed-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1186.217977] env[68437]: DEBUG nova.compute.manager [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Refreshing instance network info cache due to event network-changed-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1186.218200] env[68437]: DEBUG oslo_concurrency.lockutils [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] Acquiring lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.218401] env[68437]: DEBUG oslo_concurrency.lockutils [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] Acquired lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.218578] env[68437]: DEBUG nova.network.neutron [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Refreshing network info cache for port 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1186.275957] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.276291] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67fffabc-54ef-4b0b-a51a-e9764504f4d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.283714] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1186.283714] env[68437]: value = "task-2945014" [ 1186.283714] env[68437]: _type = "Task" [ 1186.283714] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.292175] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945014, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.401531] env[68437]: DEBUG nova.scheduler.client.report [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1186.499820] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945013, 'name': CreateVM_Task, 'duration_secs': 0.413636} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.499990] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1186.500687] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.500862] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.501225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1186.501479] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53f74c9b-89f1-4541-a723-2c4b3abb5c3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.507639] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1186.507639] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521e8e36-d257-aa47-2a04-1c51125e7ada" 
[ 1186.507639] env[68437]: _type = "Task" [ 1186.507639] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.515448] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521e8e36-d257-aa47-2a04-1c51125e7ada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.602856] env[68437]: INFO nova.compute.manager [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Took 13.41 seconds to build instance. [ 1186.711642] env[68437]: DEBUG nova.network.neutron [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updated VIF entry in instance network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1186.711993] env[68437]: DEBUG nova.network.neutron [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5f058ce1-be", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.794083] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945014, 'name': PowerOffVM_Task, 'duration_secs': 0.174282} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.796333] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.797034] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.797290] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5965145e-6ea6-48e8-8c37-d86e02bb2c60 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.804015] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1186.804015] env[68437]: value = "task-2945015" [ 1186.804015] env[68437]: _type = "Task" [ 1186.804015] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.810737] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.897794] env[68437]: DEBUG nova.network.neutron [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updated VIF entry in instance network info cache for port 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1186.898169] env[68437]: DEBUG nova.network.neutron [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updating instance_info_cache with network_info: [{"id": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "address": "fa:16:3e:9a:0b:e3", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9039c1ee-0c", "ovs_interfaceid": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.906385] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.906861] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1186.909662] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.444s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.909879] env[68437]: DEBUG nova.objects.instance [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'resources' on Instance uuid 4abf1477-2f0e-4a13-884a-c19420b3e435 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.020023] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521e8e36-d257-aa47-2a04-1c51125e7ada, 'name': SearchDatastore_Task, 'duration_secs': 0.011281} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.020023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.020023] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.020023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.020023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.020023] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.020023] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa96544a-e0aa-47df-81ff-759bf0923759 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.028145] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.028347] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1187.029026] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb73efe1-7830-47f3-b385-3acbbeeff043 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.033937] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1187.033937] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5266fdd8-ad44-d9d7-ea9d-851972ff7cc3" [ 1187.033937] env[68437]: _type = "Task" [ 1187.033937] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.041262] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266fdd8-ad44-d9d7-ea9d-851972ff7cc3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.106436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-37ede3dd-98cf-4c33-a437-0c227fd563ee tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.917s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.185195] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.214714] env[68437]: DEBUG oslo_concurrency.lockutils [req-364d0bfc-f242-4e4b-9853-0b2f4ab6b563 req-4728eb1c-4dae-4c76-b69b-2139350f8f66 service nova] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.317049] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1187.317049] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1187.317049] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591112', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'name': 'volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98ff24-e9d1-4754-89d2-ee2daa54ad47', 'attached_at': '', 'detached_at': '', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'serial': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1187.317903] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ec8ee9-7f06-4d8c-be24-23edeadb83a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.335872] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae570084-b0a1-4bd4-a5be-7c5416119338 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.342526] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8a4f63-4f9a-4d56-a658-659b4ccf1281 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.361360] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3063e6e6-1ff1-420c-bcac-bd34882843c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.376660] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] The volume has not been displaced from its original location: [datastore1] volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3/volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1187.382020] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1187.382304] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-374c74a8-6be1-45a9-8247-b12b5e0e873c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.400989] env[68437]: DEBUG oslo_concurrency.lockutils [req-f62fa1c5-6391-41a0-bf4f-e188fd777e72 req-680b8287-e592-4194-8508-7734af2a87cc service nova] Releasing lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.401507] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1187.401507] env[68437]: value = "task-2945016" [ 1187.401507] env[68437]: _type = "Task" [ 1187.401507] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.409622] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945016, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.412155] env[68437]: DEBUG nova.compute.utils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1187.415797] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1187.416046] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1187.457428] env[68437]: DEBUG nova.policy [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1187.549983] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5266fdd8-ad44-d9d7-ea9d-851972ff7cc3, 'name': SearchDatastore_Task, 'duration_secs': 0.009537} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.550826] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f5c445-8782-4d07-b7e5-fe46f5487d86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.561760] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1187.561760] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52244937-bab6-de3a-c54f-c3cb55ec5c02" [ 1187.561760] env[68437]: _type = "Task" [ 1187.561760] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.573162] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52244937-bab6-de3a-c54f-c3cb55ec5c02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.575052] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932725a8-d509-4fed-a193-0e41db0fbb20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.582347] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdadc868-9964-4401-a5e0-d9a36671703a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.614260] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1485b0-bd57-44cd-9334-0e35ff3aeacb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.621958] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5724fee9-f745-41ca-9158-f6ca00184ecd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.636454] env[68437]: DEBUG nova.compute.provider_tree [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.801308] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Successfully created port: dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1187.912321] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945016, 'name': ReconfigVM_Task, 'duration_secs': 0.287156} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.912512] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1187.917565] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-029d9fb1-43ee-45b5-8e12-93aaade051f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.934600] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1187.942126] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1187.942126] env[68437]: value = "task-2945017" [ 1187.942126] env[68437]: _type = "Task" [ 1187.942126] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.951488] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945017, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.071725] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52244937-bab6-de3a-c54f-c3cb55ec5c02, 'name': SearchDatastore_Task, 'duration_secs': 0.012794} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.072033] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.072257] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8554a78c-c2d7-459d-a295-121da777dfd4/8554a78c-c2d7-459d-a295-121da777dfd4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.072514] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-562771f0-1221-454d-a271-bc8ec40e2835 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.078644] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1188.078644] env[68437]: value = "task-2945018" [ 1188.078644] env[68437]: _type = "Task" [ 1188.078644] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.086057] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945018, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.139559] env[68437]: DEBUG nova.scheduler.client.report [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1188.197135] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.197432] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.452539] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945017, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.523417] env[68437]: DEBUG nova.compute.manager [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1188.591414] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945018, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.645267] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.648249] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.976s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.648831] env[68437]: DEBUG nova.objects.instance [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'resources' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.667717] env[68437]: INFO nova.scheduler.client.report [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted allocations for instance 4abf1477-2f0e-4a13-884a-c19420b3e435 [ 1188.701167] env[68437]: INFO nova.compute.manager [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Detaching volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 [ 1188.739620] env[68437]: INFO nova.virt.block_device [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Attempting to driver detach volume 29729ce2-5d97-4553-9b4b-d02c045e2fb3 from mountpoint /dev/sdb [ 1188.739936] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1188.740185] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1188.741099] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db805cd-067c-41be-a6bd-8bca23c531f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.764752] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461a7f49-112e-4a2e-a11b-a929bffdb4d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.771736] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e188612b-3f3e-4616-aa4a-2b4e09440f49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.792025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4490a4-8bfd-4458-8f63-93192c7330c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.806260] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] The volume has not been displaced from its original location: [datastore1] volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3/volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1188.811741] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfiguring VM instance instance-0000005a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1188.811996] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52d90bb7-0f12-40dd-b90d-644977cd332a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.828809] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1188.828809] env[68437]: value = "task-2945019" [ 1188.828809] env[68437]: _type = "Task" [ 1188.828809] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.836295] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945019, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.949033] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1188.957888] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945017, 'name': ReconfigVM_Task, 'duration_secs': 0.837153} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.958355] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591112', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'name': 'volume-32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9b98ff24-e9d1-4754-89d2-ee2daa54ad47', 'attached_at': '', 'detached_at': '', 'volume_id': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3', 'serial': '32c3b984-3df2-4cf5-8349-d6fc830a6ed3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1188.958924] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.959728] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c78742d-3990-4f0a-a35f-ef9676349a26 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.967396] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1188.969648] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29dbcb05-9904-4fe9-b08e-542a43ad2bc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.977814] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1188.978098] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1188.978270] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1188.978457] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1188.978603] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1188.978750] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1188.978956] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1188.979201] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1188.979304] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1188.979456] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1188.979626] env[68437]: DEBUG nova.virt.hardware [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1188.980453] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4c1a17-295d-45d1-9177-3e38fbefa4ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.988365] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ea25e7-320f-4e6d-b6be-9c502bf1826d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.041598] 
env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.089105] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659505} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.091017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8554a78c-c2d7-459d-a295-121da777dfd4/8554a78c-c2d7-459d-a295-121da777dfd4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.091017] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.091017] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-829cafc8-812c-4c14-a47f-b9ee04ce1100 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.096680] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1189.096680] env[68437]: value = "task-2945021" [ 1189.096680] env[68437]: _type = "Task" [ 1189.096680] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.104678] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945021, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.151560] env[68437]: DEBUG nova.objects.instance [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'numa_topology' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.175848] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ab9b636-27f4-425e-84af-0b4a71d8008f tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "4abf1477-2f0e-4a13-884a-c19420b3e435" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.402s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.273862] env[68437]: DEBUG nova.compute.manager [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Received event network-vif-plugged-dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1189.274184] env[68437]: DEBUG oslo_concurrency.lockutils [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] Acquiring lock "7a05d783-afac-43a1-a715-c83b42c990c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.274395] env[68437]: DEBUG oslo_concurrency.lockutils [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] Lock "7a05d783-afac-43a1-a715-c83b42c990c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.274578] env[68437]: DEBUG oslo_concurrency.lockutils [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] Lock "7a05d783-afac-43a1-a715-c83b42c990c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.274745] env[68437]: DEBUG nova.compute.manager [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] No waiting events found dispatching network-vif-plugged-dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1189.274931] env[68437]: WARNING nova.compute.manager [req-f3e1bf29-8552-4448-925a-46f0c8c0666b req-7fa62336-5c41-4d8a-87da-fb261d95cc72 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Received unexpected event network-vif-plugged-dabb93d9-4501-4176-a8b1-cea28a047927 for instance with vm_state building and task_state spawning. [ 1189.342115] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945019, 'name': ReconfigVM_Task, 'duration_secs': 0.309084} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.342327] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Reconfigured VM instance instance-0000005a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1189.347115] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb05631c-5646-47b8-b755-23ef65c5263c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.359884] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1189.360115] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1189.360302] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Deleting the datastore file [datastore1] 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1189.360943] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fe4462c-24d2-449c-9f62-7139a300e1f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.365480] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1189.365480] env[68437]: value = "task-2945022" [ 1189.365480] env[68437]: _type = "Task" [ 1189.365480] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.369818] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for the task: (returnval){ [ 1189.369818] env[68437]: value = "task-2945023" [ 1189.369818] env[68437]: _type = "Task" [ 1189.369818] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.376860] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945022, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.383069] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.414982] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Successfully updated port: dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.605991] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219181} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.606311] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1189.607128] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3fdbd0-ccba-4a94-b6df-48d94752de7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.629102] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 8554a78c-c2d7-459d-a295-121da777dfd4/8554a78c-c2d7-459d-a295-121da777dfd4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.629460] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9f34719-0d79-4f92-abcf-1b91a5258225 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.649046] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1189.649046] env[68437]: value = "task-2945024" [ 1189.649046] env[68437]: _type = "Task" [ 1189.649046] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.657134] env[68437]: DEBUG nova.objects.base [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1189.659469] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945024, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.814852] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39931183-07a0-4fa6-b04e-0e14371dfe6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.823030] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c84696-9b02-4f2c-8256-609bcf38bc7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.854726] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f375617-dca0-48be-9838-f35e08314db8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.862304] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5ef620-67b8-4f04-a25f-d03ddbcda240 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.883449] env[68437]: DEBUG oslo_vmware.api [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945022, 'name': ReconfigVM_Task, 'duration_secs': 0.246859} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.884454] env[68437]: DEBUG nova.compute.provider_tree [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.885709] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591120', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'name': 'volume-29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '76d97a56-21a2-4363-a987-ef872f056510', 'attached_at': '', 'detached_at': '', 'volume_id': '29729ce2-5d97-4553-9b4b-d02c045e2fb3', 'serial': '29729ce2-5d97-4553-9b4b-d02c045e2fb3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1189.890777] env[68437]: DEBUG oslo_vmware.api [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Task: {'id': task-2945023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196804} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.891116] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1189.891187] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1189.891383] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1189.920382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.920513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.920684] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1189.948009] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1189.948336] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adaab1e2-97be-4bdb-984d-253bda279095 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.958304] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76b9b66-b271-4b28-9d0a-ace0ba92faa4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.988605] env[68437]: ERROR nova.compute.manager [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Failed to detach volume 32c3b984-3df2-4cf5-8349-d6fc830a6ed3 from /dev/sda: nova.exception.InstanceNotFound: Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 could not be found. 
[ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Traceback (most recent call last): [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self.driver.rebuild(**kwargs) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise NotImplementedError() [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] NotImplementedError [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] During handling of the above exception, another exception occurred: [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Traceback (most recent call last): [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self.driver.detach_volume(context, old_connection_info, [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] return self._volumeops.detach_volume(connection_info, instance) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._detach_volume_vmdk(connection_info, instance) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] stable_ref.fetch_moref(session) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] 
nova.exception.InstanceNotFound: Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 could not be found. [ 1189.988605] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.106965] env[68437]: DEBUG nova.compute.utils [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Build of instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 aborted: Failed to rebuild volume backed instance. {{(pid=68437) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1190.109048] env[68437]: ERROR nova.compute.manager [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 aborted: Failed to rebuild volume backed instance. [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Traceback (most recent call last): [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self.driver.rebuild(**kwargs) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise NotImplementedError() [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] NotImplementedError [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] During handling of the above exception, another exception occurred: [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Traceback (most recent call last): [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._detach_root_volume(context, instance, root_bdm) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] with excutils.save_and_reraise_exception(): [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self.force_reraise() [ 1190.109048] env[68437]: ERROR nova.compute.manager 
[instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise self.value [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self.driver.detach_volume(context, old_connection_info, [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] return self._volumeops.detach_volume(connection_info, instance) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._detach_volume_vmdk(connection_info, instance) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] stable_ref.fetch_moref(session) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] nova.exception.InstanceNotFound: Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 could not be found. 
[ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] During handling of the above exception, another exception occurred: [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Traceback (most recent call last): [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 11390, in _error_out_instance_on_exception [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] yield [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1190.109048] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._do_rebuild_instance_with_claim( [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._do_rebuild_instance( [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._rebuild_default_impl(**kwargs) [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] self._rebuild_volume_backed_instance( [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] raise exception.BuildAbortException( [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] nova.exception.BuildAbortException: Build of instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 aborted: Failed to rebuild volume backed instance. [ 1190.110081] env[68437]: ERROR nova.compute.manager [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] [ 1190.158880] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945024, 'name': ReconfigVM_Task, 'duration_secs': 0.301945} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.159163] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 8554a78c-c2d7-459d-a295-121da777dfd4/8554a78c-c2d7-459d-a295-121da777dfd4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1190.159761] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9ee6d10-118d-4de7-9fc3-ac62a1dee95b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.165074] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1190.165074] env[68437]: value = "task-2945025" [ 1190.165074] env[68437]: _type = "Task" [ 1190.165074] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.172304] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945025, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.391594] env[68437]: DEBUG nova.scheduler.client.report [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.436096] env[68437]: DEBUG nova.objects.instance [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'flavor' on Instance uuid 76d97a56-21a2-4363-a987-ef872f056510 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.452344] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1190.575192] env[68437]: DEBUG nova.network.neutron [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Updating instance_info_cache with network_info: [{"id": "dabb93d9-4501-4176-a8b1-cea28a047927", "address": "fa:16:3e:fe:6d:72", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdabb93d9-45", "ovs_interfaceid": "dabb93d9-4501-4176-a8b1-cea28a047927", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.675025] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945025, 'name': Rename_Task, 'duration_secs': 0.133289} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.675340] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.675584] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0068240-a51c-4b0b-8476-96facd0467f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.681651] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1190.681651] env[68437]: value = "task-2945026" [ 1190.681651] env[68437]: _type = "Task" [ 1190.681651] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.688789] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945026, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.897380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.900414] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.859s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.919114] env[68437]: DEBUG nova.compute.manager [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1191.078496] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.078821] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Instance network_info: |[{"id": "dabb93d9-4501-4176-a8b1-cea28a047927", "address": "fa:16:3e:fe:6d:72", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdabb93d9-45", "ovs_interfaceid": "dabb93d9-4501-4176-a8b1-cea28a047927", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1191.079279] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:6d:72', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dabb93d9-4501-4176-a8b1-cea28a047927', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1191.086858] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1191.087423] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1191.087650] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dafb40e1-0c59-4d31-aa32-6cdbb8af11af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.107449] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1191.107449] env[68437]: value = "task-2945027" [ 1191.107449] env[68437]: _type = "Task" [ 1191.107449] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.116607] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945027, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.193207] env[68437]: DEBUG oslo_vmware.api [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945026, 'name': PowerOnVM_Task, 'duration_secs': 0.4633} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.193510] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1191.193716] env[68437]: INFO nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Took 7.31 seconds to spawn the instance on the hypervisor. 
[ 1191.193898] env[68437]: DEBUG nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1191.194752] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af19eb40-eab4-46f8-9fd1-517775b24889 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.301430] env[68437]: DEBUG nova.compute.manager [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Received event network-changed-dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1191.301430] env[68437]: DEBUG nova.compute.manager [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Refreshing instance network info cache due to event network-changed-dabb93d9-4501-4176-a8b1-cea28a047927. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1191.301530] env[68437]: DEBUG oslo_concurrency.lockutils [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] Acquiring lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.301725] env[68437]: DEBUG oslo_concurrency.lockutils [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] Acquired lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.301936] env[68437]: DEBUG nova.network.neutron [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Refreshing network info cache for port dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1191.405867] env[68437]: INFO nova.compute.claims [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1191.411710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f45bdb1d-e5d0-4dfb-b102-f04665b5dccc tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.369s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.413021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.228s {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.413021] env[68437]: INFO nova.compute.manager [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Unshelving [ 1191.437162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.443402] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c5c6c0dc-8aae-4eac-91fa-eb5625230627 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.246s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.617546] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945027, 'name': CreateVM_Task, 'duration_secs': 0.394649} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.620497] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1191.621267] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.621456] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.621775] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1191.622367] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb8900dc-acc9-4acd-9fca-ad8310489931 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.626771] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1191.626771] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5276c5b7-b9b5-b1c3-7cf0-4b8c8fd0c679" [ 1191.626771] env[68437]: _type = "Task" [ 1191.626771] 
env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.636737] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276c5b7-b9b5-b1c3-7cf0-4b8c8fd0c679, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.712212] env[68437]: INFO nova.compute.manager [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Took 14.04 seconds to build instance. [ 1191.913134] env[68437]: INFO nova.compute.resource_tracker [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating resource usage from migration 91867768-22b8-454f-a726-07dda53e3078 [ 1192.029945] env[68437]: DEBUG nova.network.neutron [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Updated VIF entry in instance network info cache for port dabb93d9-4501-4176-a8b1-cea28a047927. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1192.030317] env[68437]: DEBUG nova.network.neutron [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Updating instance_info_cache with network_info: [{"id": "dabb93d9-4501-4176-a8b1-cea28a047927", "address": "fa:16:3e:fe:6d:72", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdabb93d9-45", "ovs_interfaceid": "dabb93d9-4501-4176-a8b1-cea28a047927", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.094019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e8f4b5-ca2a-40f3-ac4c-8d225a332a4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.101440] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84be8ffe-f8ae-44af-b3ed-336ca3039b69 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.133451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.137572] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6d9bcd-8f21-4502-80e2-4b8fd9535f6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.148318] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5276c5b7-b9b5-b1c3-7cf0-4b8c8fd0c679, 'name': SearchDatastore_Task, 'duration_secs': 0.009924} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.150369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.150598] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1192.150847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.150981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.151165] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.151653] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97110cae-7821-4304-92af-77dc6319e320 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.154352] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dc06e8-914f-4045-a5f4-43b0d5eec964 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.168780] env[68437]: DEBUG nova.compute.provider_tree [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.170772] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.170960] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1192.171804] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d335b36-c28e-435b-ad27-7708eaef27b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.176754] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1192.176754] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52afcd73-57c8-2835-77b1-4154bd0a4460" [ 1192.176754] env[68437]: _type = "Task" [ 1192.176754] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.185140] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52afcd73-57c8-2835-77b1-4154bd0a4460, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.214262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-183c0c78-8d06-4e4e-af01-d3fc12ebccdb tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.548s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.349326] env[68437]: DEBUG nova.compute.manager [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Received event network-changed-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1192.349520] env[68437]: DEBUG nova.compute.manager [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Refreshing instance network info cache due to event network-changed-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1192.349840] env[68437]: DEBUG oslo_concurrency.lockutils [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] Acquiring lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.349906] env[68437]: DEBUG oslo_concurrency.lockutils [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] Acquired lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.350032] env[68437]: DEBUG nova.network.neutron [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Refreshing network info cache for port 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1192.360297] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.360506] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.360705] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.360919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.361098] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.362800] env[68437]: INFO nova.compute.manager [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Terminating instance [ 1192.424904] env[68437]: DEBUG nova.compute.utils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1192.532661] env[68437]: DEBUG oslo_concurrency.lockutils [req-230ddd1e-6b37-4d0f-a906-2799d6d964d2 req-eb4a5a29-1f72-4a24-bc8d-933bd6179050 service nova] Releasing lock "refresh_cache-7a05d783-afac-43a1-a715-c83b42c990c2" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.645150] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.645437] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.645657] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "76d97a56-21a2-4363-a987-ef872f056510-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.645889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.646083] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.648304] env[68437]: INFO nova.compute.manager [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Terminating instance [ 1192.673475] env[68437]: DEBUG nova.scheduler.client.report [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.687294] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52afcd73-57c8-2835-77b1-4154bd0a4460, 'name': SearchDatastore_Task, 'duration_secs': 0.008223} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.688073] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbb2ba43-204f-45bb-9708-7e1930beec9c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.694518] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1192.694518] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523c274e-0ebb-59ef-469c-965ebf02992a" [ 1192.694518] env[68437]: _type = "Task" [ 1192.694518] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.702368] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c274e-0ebb-59ef-469c-965ebf02992a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.865920] env[68437]: DEBUG nova.compute.manager [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1192.866288] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ab51a6f-454e-4563-8ba5-6cb1467a1d74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.875839] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a332a65d-6fe9-4da7-b499-8ca62a6c8da4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.908352] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 could not be found. [ 1192.908577] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.908909] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e0b824a-4383-4f3d-a3ab-d848452b77ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.917257] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd40219-2149-4476-b15d-ebea848add79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.929765] env[68437]: INFO nova.virt.block_device [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Booting with volume a6d824b6-bc69-4e16-83a3-57fdea03f8a2 at /dev/sdb [ 1192.950574] env[68437]: WARNING nova.virt.vmwareapi.vmops [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 could not be found. 
[ 1192.950808] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1192.951015] env[68437]: INFO nova.compute.manager [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1192.951284] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1192.951553] env[68437]: DEBUG nova.compute.manager [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1192.951644] env[68437]: DEBUG nova.network.neutron [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1192.963788] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10e815cf-5a88-4bfd-96db-73be158d7461 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.972688] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad0c80c-11b0-45d8-826f-ff2dda87da4d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.005491] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e10ec7ee-abc3-4d3d-9fad-e9acdb0eceec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.014200] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19074794-f929-495a-854d-f67926ae27f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.049433] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc70306f-3e31-488e-b4ee-acd5e8ecd8ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.056346] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b612fc-2453-4912-a98d-425f6b69b659 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.070292] env[68437]: DEBUG nova.virt.block_device [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating existing volume attachment record: 66ea7ad0-a60c-43ab-9a4e-91ad36de2f80 {{(pid=68437) 
_volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1193.095392] env[68437]: DEBUG nova.network.neutron [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updated VIF entry in instance network info cache for port 9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1193.095955] env[68437]: DEBUG nova.network.neutron [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updating instance_info_cache with network_info: [{"id": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "address": "fa:16:3e:9a:0b:e3", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9039c1ee-0c", "ovs_interfaceid": "9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.151739] env[68437]: DEBUG nova.compute.manager [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1193.152081] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.152891] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b4a30d-c4d9-4b89-b849-458439397f62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.161184] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.161663] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e486b259-c140-455c-b3bd-099f2ad21006 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.168403] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1193.168403] env[68437]: value = "task-2945028" [ 1193.168403] env[68437]: _type = "Task" [ 1193.168403] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.176795] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.179017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.279s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.179244] env[68437]: INFO nova.compute.manager [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Migrating [ 1193.186190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.749s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.209091] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523c274e-0ebb-59ef-469c-965ebf02992a, 'name': SearchDatastore_Task, 'duration_secs': 0.008773} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.209261] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.209521] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7a05d783-afac-43a1-a715-c83b42c990c2/7a05d783-afac-43a1-a715-c83b42c990c2.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1193.210313] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27e3e8c0-d57a-4169-9850-873cd0c96499 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.218756] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1193.218756] env[68437]: value = "task-2945030" [ 1193.218756] env[68437]: _type = "Task" [ 1193.218756] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.227548] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.598836] env[68437]: DEBUG oslo_concurrency.lockutils [req-14c6597a-4f00-4fa3-b8da-c9cdf65e6b2c req-ec6cb04a-ed2d-4b3b-9bee-be62abb33d96 service nova] Releasing lock "refresh_cache-8554a78c-c2d7-459d-a295-121da777dfd4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.683751] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945028, 'name': PowerOffVM_Task, 'duration_secs': 0.275214} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.684090] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.684294] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1193.684626] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34bb7d81-69e0-4340-9fd4-19476c87d6a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.699858] env[68437]: INFO nova.compute.claims [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1193.705030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.705302] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.705493] env[68437]: DEBUG nova.network.neutron [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Building network info cache for instance {{(pid=68437) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1193.738509] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945030, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.759136] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1193.759504] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1193.759755] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore1] 76d97a56-21a2-4363-a987-ef872f056510 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1193.760163] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-077f4d8b-42e1-4c15-a7ef-d98817e98fc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.770279] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1193.770279] env[68437]: value = "task-2945034" [ 1193.770279] env[68437]: _type = "Task" [ 1193.770279] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.785611] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.207418] env[68437]: INFO nova.compute.resource_tracker [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating resource usage from migration c451d302-0db3-4d94-a101-fdcd13f3e017 [ 1194.228424] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945030, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582623} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.228726] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 7a05d783-afac-43a1-a715-c83b42c990c2/7a05d783-afac-43a1-a715-c83b42c990c2.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1194.228952] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1194.229512] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33bf2322-fb6e-4d22-9244-4e28b1903a70 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.239600] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1194.239600] env[68437]: value = "task-2945035" [ 1194.239600] env[68437]: _type = "Task" [ 1194.239600] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.251354] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945035, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.283019] env[68437]: DEBUG nova.network.neutron [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.287493] env[68437]: DEBUG oslo_vmware.api [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276385} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.287493] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.287493] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.287667] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.287697] env[68437]: INFO nova.compute.manager [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1194.288731] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1194.288731] env[68437]: DEBUG nova.compute.manager [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1194.288731] env[68437]: DEBUG nova.network.neutron [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1194.395553] env[68437]: DEBUG nova.compute.manager [req-45d4a7df-3fb8-459b-9c95-6a0d41610b78 req-d064f86d-f26b-4f89-aac6-75e5ec428a6c service nova] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Received event network-vif-deleted-9c369f5b-7c68-4b57-8c1e-cc2b007af652 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1194.449203] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89366bc3-196c-4608-adbb-324f523e976b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.457618] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a961f19-1cd4-4ca3-802b-19b59b2eebf4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.492985] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a88340b-96c2-483a-91fe-bafb822eb077 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.501019] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac285fc-086b-4168-adcb-c1610ded313c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.521029] env[68437]: DEBUG nova.compute.provider_tree [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.532603] env[68437]: DEBUG nova.network.neutron [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": 
"nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.749736] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945035, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232349} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.750084] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1194.750915] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346f727d-d2a5-4304-8924-027d244f3786 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.772947] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 7a05d783-afac-43a1-a715-c83b42c990c2/7a05d783-afac-43a1-a715-c83b42c990c2.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1194.773248] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d257b8fb-a90a-4d6d-ab0d-4f68405c1d42 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.788952] env[68437]: INFO nova.compute.manager [-] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Took 1.84 seconds to deallocate network for instance. [ 1194.792599] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1194.792599] env[68437]: value = "task-2945036" [ 1194.792599] env[68437]: _type = "Task" [ 1194.792599] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.800556] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945036, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.016612] env[68437]: DEBUG nova.compute.manager [req-8f47541b-04e4-4490-b3fc-19503458ae1a req-1bbfa0a6-47ce-417f-906a-aff7e0fb4a94 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Received event network-vif-deleted-520c7db4-23e9-44bf-846b-9f1eb94579f7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1195.016811] env[68437]: INFO nova.compute.manager [req-8f47541b-04e4-4490-b3fc-19503458ae1a req-1bbfa0a6-47ce-417f-906a-aff7e0fb4a94 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Neutron deleted interface 520c7db4-23e9-44bf-846b-9f1eb94579f7; detaching it from the instance and deleting it from the info cache [ 1195.017043] env[68437]: DEBUG nova.network.neutron [req-8f47541b-04e4-4490-b3fc-19503458ae1a req-1bbfa0a6-47ce-417f-906a-aff7e0fb4a94 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.021457] env[68437]: DEBUG nova.scheduler.client.report [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1195.035935] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.304899] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945036, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.337665] env[68437]: INFO nova.compute.manager [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Took 0.55 seconds to detach 1 volumes for instance. 
[ 1195.339830] env[68437]: DEBUG nova.compute.manager [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Deleting volume: 32c3b984-3df2-4cf5-8349-d6fc830a6ed3 {{(pid=68437) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1195.501138] env[68437]: DEBUG nova.network.neutron [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.519155] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9decf5ce-2bc8-47b0-8407-a5380c049030 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.530201] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c5d93f-3e1f-4962-b3e5-21d711de1d67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.542537] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.356s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.542788] env[68437]: INFO nova.compute.manager [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Migrating [ 1195.558082] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.424s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.588039] env[68437]: DEBUG nova.compute.manager [req-8f47541b-04e4-4490-b3fc-19503458ae1a req-1bbfa0a6-47ce-417f-906a-aff7e0fb4a94 service nova] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Detach interface failed, port_id=520c7db4-23e9-44bf-846b-9f1eb94579f7, reason: Instance 76d97a56-21a2-4363-a987-ef872f056510 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1195.783713] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cebb7b-99b9-4224-a4ab-ee14b242b99a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.791888] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422019c0-5ad9-42c8-813c-a5579f2acfec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.803663] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945036, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.830723] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d04f9bf-dc80-4ce7-97cb-78e964f7d6a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.838282] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c00503-118d-4ef8-8c29-fa285bb62e71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.851479] env[68437]: DEBUG nova.compute.provider_tree [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.884290] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.004630] env[68437]: INFO nova.compute.manager [-] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Took 1.72 seconds to deallocate network for instance. [ 1196.066889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.067141] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.067448] env[68437]: DEBUG nova.network.neutron [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1196.305443] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945036, 'name': ReconfigVM_Task, 'duration_secs': 1.130648} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.305731] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 7a05d783-afac-43a1-a715-c83b42c990c2/7a05d783-afac-43a1-a715-c83b42c990c2.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1196.306370] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-423bf7a3-b217-430c-9af7-cbeeaadfc95a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.312722] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1196.312722] env[68437]: value = "task-2945039" [ 1196.312722] env[68437]: _type = "Task" [ 1196.312722] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.319961] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945039, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.354994] env[68437]: DEBUG nova.scheduler.client.report [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.511231] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1196.564089] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a394ad8a-30bd-4011-b8b2-ce2a213cc271 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.584249] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.794772] 
env[68437]: DEBUG nova.network.neutron [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.823676] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945039, 'name': Rename_Task, 'duration_secs': 0.175068} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.823968] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1196.824234] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8b0b534-a67e-4580-9d65-e9c344e25c7e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.830222] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1196.830222] env[68437]: value = "task-2945040" [ 1196.830222] env[68437]: _type = "Task" [ 1196.830222] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.837302] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945040, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.859554] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.302s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.859764] env[68437]: INFO nova.compute.manager [None req-ffae7ffa-7b74-4f7f-ae67-4f73946acb26 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Successfully reverted task state from rebuilding on failure for instance. [ 1196.865345] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.981s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.865345] env[68437]: DEBUG nova.objects.instance [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lazy-loading 'resources' on Instance uuid 9b98ff24-e9d1-4754-89d2-ee2daa54ad47 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.090062] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.090388] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-973ad88d-7b58-4487-90f9-580f40555491 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.098440] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1197.098440] env[68437]: value = "task-2945041" [ 1197.098440] env[68437]: _type = "Task" [ 1197.098440] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.106310] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945041, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.297963] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.340436] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945040, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.508526] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afd4f7f-2d98-433c-8609-96a5a91c4b14 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.516163] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b56279-5a39-45d5-8239-e1ef117ceb92 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.548116] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f8144d-5b68-4ff9-a008-b1d03d2741ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.556149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bdc109-29cb-4b1b-a8c7-c3b82b29e446 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.572605] env[68437]: DEBUG nova.compute.provider_tree [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.607540] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945041, 'name': PowerOffVM_Task, 'duration_secs': 0.196511} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.607817] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1197.607997] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.841056] env[68437]: DEBUG oslo_vmware.api [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945040, 'name': PowerOnVM_Task, 'duration_secs': 0.725412} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.841390] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.841574] env[68437]: INFO nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Took 8.89 seconds to spawn the instance on the hypervisor. 
[ 1197.841712] env[68437]: DEBUG nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.842482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcaaa8e-0f0e-4ae1-9088-86d8dfc703b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.076539] env[68437]: DEBUG nova.scheduler.client.report [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.114146] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1198.114464] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1198.114676] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1198.114922] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1198.115134] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1198.115346] env[68437]: DEBUG nova.virt.hardware [None
req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1198.115568] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1198.115729] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1198.115894] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1198.116072] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1198.116252] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1198.123340] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d8140f6-1162-4c9c-b822-5ab2c2dcec57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.140820] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1198.140820] env[68437]: value = "task-2945042" [ 1198.140820] env[68437]: _type = "Task" [ 1198.140820] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.151318] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.361023] env[68437]: INFO nova.compute.manager [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Took 16.41 seconds to build instance. 
[ 1198.581603] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.584500] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.073s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.584500] env[68437]: DEBUG nova.objects.instance [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'resources' on Instance uuid 76d97a56-21a2-4363-a987-ef872f056510 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.651139] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945042, 'name': ReconfigVM_Task, 'duration_secs': 0.228575} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.651416] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.685427] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.812671] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd68e94-480a-4e86-92b0-21f5c5078ebb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.832991] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.863167] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3931cf20-386a-4836-b433-f9d09ac283c5 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "7a05d783-afac-43a1-a715-c83b42c990c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.923s {{(pid=68437) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.017045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.017045] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.099549] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b3d54b12-ba93-4efe-a032-116c6a5f66b5 tempest-ServerActionsV293TestJSON-1135273708 tempest-ServerActionsV293TestJSON-1135273708-project-member] Lock "9b98ff24-e9d1-4754-89d2-ee2daa54ad47" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.739s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.157629] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1199.157901] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1199.158082] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1199.158273] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1199.158421] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Image pref 0:0:0 {{(pid=68437)
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1199.158574] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1199.158785] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1199.158945] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1199.159193] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1199.159373] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1199.159549] env[68437]: DEBUG nova.virt.hardware [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1199.165187] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1199.167942] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbd08941-f6bf-4271-8f9c-cd7fe332d1a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.189455] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1199.189455] env[68437]: value = "task-2945043" [ 1199.189455] env[68437]: _type = "Task" [ 1199.189455] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.200258] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945043, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.274748] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1719e2a-8735-4145-93be-7228dc99a63f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.282568] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6adc891-063c-4cf1-9786-edf7329ebb29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.311105] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e551fe10-b845-4eee-b3cf-d3fc4998edd0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.318182] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4111e1-7f2e-4266-8487-27ab7dba4c21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.332495] env[68437]: DEBUG nova.compute.provider_tree [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.338904] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1199.339197] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3dcbf088-8db5-4488-bd04-d3d5c4890f16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.345832] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1199.345832] env[68437]: value = "task-2945044" [ 1199.345832] env[68437]: _type = "Task" [ 1199.345832] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.353667] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945044, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.519493] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1199.699102] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945043, 'name': ReconfigVM_Task, 'duration_secs': 0.173909} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.699404] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1199.700199] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e98ab2-889d-4d22-9b27-a6a6e63cdf58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.721671] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1199.721909] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b210d98-78ff-4cc3-bcc1-068a6249bd23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.739326] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1199.739326] env[68437]: value = "task-2945045" [ 1199.739326] env[68437]: _type = "Task" [ 1199.739326] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.746863] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945045, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.836274] env[68437]: DEBUG nova.scheduler.client.report [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1199.856372] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945044, 'name': PowerOffVM_Task, 'duration_secs': 0.236445} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.856650] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1199.856828] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.042555] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.249812] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945045, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.341983] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.758s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.344471] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.659s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.344695] env[68437]: DEBUG nova.objects.instance [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'pci_requests' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.362900] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1200.363272] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1200.363504] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1200.363731] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1200.363939] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1200.364384] env[68437]: DEBUG nova.virt.hardware [None 
req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1200.364384] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1200.364556] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1200.364798] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1200.365039] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1200.365270] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1200.372208] env[68437]: INFO nova.scheduler.client.report [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocations for instance 76d97a56-21a2-4363-a987-ef872f056510 [ 1200.373842] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67141bff-c080-4fda-9db9-1b0236564219 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.394413] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1200.394413] env[68437]: value = "task-2945046" [ 1200.394413] env[68437]: _type = "Task" [ 1200.394413] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.405117] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945046, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.749394] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945045, 'name': ReconfigVM_Task, 'duration_secs': 0.684907} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.749786] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc/00b76648-d27b-4002-80cb-366e64c32ecc.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1200.749924] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1200.848250] env[68437]: DEBUG nova.objects.instance [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'numa_topology' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.890869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5f0a497e-876e-4c36-91f7-e447d3ac7b60 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "76d97a56-21a2-4363-a987-ef872f056510" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.245s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.904644] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945046, 'name': ReconfigVM_Task, 'duration_secs': 0.4468} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.905073] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.256605] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff8b552-98a9-4d9f-a3ac-21319aa5901c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.276496] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b346a2-1742-416a-a496-15c6aff80e77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.293718] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1201.350750] env[68437]: INFO nova.compute.claims [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1201.411238] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1201.411468] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.411468] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1201.411639] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 
{{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.411789] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1201.411938] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1201.412258] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1201.412345] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1201.412490] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1201.412655] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1201.412829] env[68437]: DEBUG nova.virt.hardware [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1201.418334] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfiguring VM instance instance-00000029 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1201.418869] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96b3fd64-a40e-460b-a2cd-7d0773874ecf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.437216] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1201.437216] env[68437]: value = "task-2945047" [ 1201.437216] env[68437]: _type = "Task" [ 1201.437216] env[68437]: } to 
complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.444966] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945047, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.830681] env[68437]: DEBUG nova.network.neutron [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Port 4f16f068-3b53-4a8d-a82b-21114eb371ea binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1201.948466] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945047, 'name': ReconfigVM_Task, 'duration_secs': 0.287997} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.948763] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfigured VM instance instance-00000029 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1201.949525] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20ba98f-89ee-474d-a58b-b50bf90b0c60 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.972787] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.973130] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94f05384-156f-4422-9ca2-f3962f3bd100 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.990442] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1201.990442] env[68437]: value = "task-2945048" [ 1201.990442] env[68437]: _type = "Task" [ 1201.990442] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.998283] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945048, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.230688] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.442759] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "ede62837-4ff5-44be-a015-9ea06b9126a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.443055] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.501392] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945048, 'name': ReconfigVM_Task, 'duration_secs': 0.445773} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.501675] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795/8ccd7176-55c0-4118-a07e-3c4bdbba9795.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1202.501937] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1202.515120] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3394fce2-1b6c-4d07-b834-cc4839cc0d30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.521891] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e8123-991a-4e8c-947b-61e18f6f5ddd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.553895] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ec2fde-2ae8-4ec6-8075-dcf07fc20973 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.563549] env[68437]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea11031a-949b-4b31-961e-a1fc2e1642c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.579344] env[68437]: DEBUG nova.compute.provider_tree [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1202.853954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.854285] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.854356] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.945410] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1203.010317] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af546b0-ee87-4632-9ad0-e02b1c246f8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.029303] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8fa362a-617f-4da3-bc93-416220684293 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.045882] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1203.099898] env[68437]: ERROR nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [req-d1dd24f8-1ac8-4fc8-b8e4-1165ac755117] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d1dd24f8-1ac8-4fc8-b8e4-1165ac755117"}]} [ 1203.116148] env[68437]: DEBUG nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1203.129497] env[68437]: DEBUG nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1203.129740] env[68437]: DEBUG nova.compute.provider_tree [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1203.142363] env[68437]: DEBUG nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1203.159386] env[68437]: DEBUG nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1203.296794] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87327888-ac88-4ad8-ad52-722da5b74cbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.304114] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7594bc9-d9b4-45a5-aa88-92057f614d8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.333085] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39541230-24c2-41a9-8070-cbf1131e8d95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.340205] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f566630-7417-4d86-a0c1-064bf628f672 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.353066] env[68437]: DEBUG nova.compute.provider_tree [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1203.463327] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.583992] env[68437]: DEBUG nova.network.neutron [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Port 4ec75d03-3ee1-480d-ab6a-acc211fd6bae binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1203.882319] env[68437]: DEBUG nova.scheduler.client.report [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 138 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1203.882639] env[68437]: DEBUG nova.compute.provider_tree [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 138 to 139 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1203.882774] env[68437]: DEBUG nova.compute.provider_tree [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1203.887939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.888127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.888308] env[68437]: DEBUG nova.network.neutron [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Building network info cache for instance {{(pid=68437) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1204.388244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.044s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.390500] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.348s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.393746] env[68437]: INFO nova.compute.claims [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1204.425430] env[68437]: INFO nova.network.neutron [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating port 5f058ce1-be0f-4b97-be84-11302a668781 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1204.604629] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.604802] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.604983] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.631615] env[68437]: DEBUG nova.network.neutron [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.133684] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.541467] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aababaee-f724-43f7-8981-a728d1371801 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.549279] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a89d670-1f4f-4e02-a7c2-6a90708390ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.581556] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31915924-adb9-47a1-a0ed-e7a7efd8d5dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.588899] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4afea760-686e-4a64-abc4-f299ae3ef883 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.602330] env[68437]: DEBUG nova.compute.provider_tree [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.648672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.648861] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.649048] env[68437]: DEBUG nova.network.neutron [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1205.656116] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2cda0f-6f6f-4668-9170-91ef3de6060e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.675396] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479ba466-97bf-4ab1-a660-9e0047f21d67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.681934] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1205.816775] env[68437]: DEBUG nova.compute.manager [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1205.816775] env[68437]: DEBUG oslo_concurrency.lockutils [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1205.817035] env[68437]: DEBUG oslo_concurrency.lockutils [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.817104] env[68437]: DEBUG oslo_concurrency.lockutils [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.817343] env[68437]: DEBUG nova.compute.manager [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] No waiting events found dispatching network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1205.817420] env[68437]: WARNING nova.compute.manager [req-e112852f-40e5-4da8-b0b7-54aaa4598552 req-91bad151-a79f-44b2-af2e-5305fc914a99 service nova] [instance: 
aff861ed-e792-480a-811e-c157c0606d08] Received unexpected event network-vif-plugged-5f058ce1-be0f-4b97-be84-11302a668781 for instance with vm_state shelved_offloaded and task_state spawning. [ 1205.903378] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1205.903574] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1205.903759] env[68437]: DEBUG nova.network.neutron [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1206.105631] env[68437]: DEBUG nova.scheduler.client.report [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1206.188480] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.189103] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a813ece-8517-4e1f-baeb-9da043e2b046 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.196808] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1206.196808] env[68437]: value = "task-2945049" [ 1206.196808] env[68437]: _type = "Task" [ 1206.196808] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.204211] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945049, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.354355] env[68437]: DEBUG nova.network.neutron [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.609355] env[68437]: DEBUG nova.network.neutron [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.611143] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 
tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.611677] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1206.614347] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.151s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.615731] env[68437]: INFO nova.compute.claims [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1206.706382] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945049, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.733504] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.733722] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.733877] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.734050] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.734202] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1206.857113] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.115263] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.119593] env[68437]: DEBUG nova.compute.utils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1207.122839] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1207.123047] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1207.142359] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e6d935ba9e53ac41b925970349996f0f',container_format='bare',created_at=2025-03-11T18:46:19Z,direct_url=,disk_format='vmdk',id=fa463cf5-5b17-4cb0-9385-6d7d061c3876,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1547216346-shelved',owner='0e28f7fd8c8d412f8c9e1624c55d6604',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-03-11T18:46:32Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1207.142611] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.142770] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1207.142955] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.143141] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1207.143299] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1207.143513] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1207.143672] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1207.143838] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1207.144033] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1207.144228] env[68437]: DEBUG nova.virt.hardware [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1207.145309] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0d9ad2-93a3-472d-b5d7-e4c304c4fbaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.153517] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d86b2f-a6e1-47e3-9673-5a7689ced1d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.159862] env[68437]: DEBUG nova.policy [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 
tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1207.169319] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:8c:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f058ce1-be0f-4b97-be84-11302a668781', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.176583] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1207.177087] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1207.177307] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f8361bc-04a4-4c92-a2e2-beb0ae687b12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.195438] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.195438] env[68437]: value = "task-2945050" [ 1207.195438] env[68437]: _type = "Task" [ 1207.195438] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.207010] env[68437]: DEBUG oslo_vmware.api [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945049, 'name': PowerOnVM_Task, 'duration_secs': 0.577266} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.209837] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.210050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c24b119b-e15c-4435-96fc-b0cce3606671 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance '00b76648-d27b-4002-80cb-366e64c32ecc' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1207.213750] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945050, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.231640] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.387053] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27717b3-83c5-4694-97c7-e8ed6f40b632 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.405753] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0317c2-7843-42d6-bc40-53075a526943 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.412827] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1207.423907] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Successfully created port: bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1207.624181] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1207.706798] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945050, 'name': CreateVM_Task, 'duration_secs': 0.331505} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.708951] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1207.709955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.710139] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.710491] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1207.710737] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-407fd959-a387-4b1b-8f2c-9749be009b1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.717444] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1207.717444] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ccb7e0-7633-caae-b678-ee9f70381c08" [ 1207.717444] env[68437]: _type = "Task" [ 1207.717444] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.728105] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ccb7e0-7633-caae-b678-ee9f70381c08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.789626] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bf429b-e899-4999-8685-03e12dbcf838 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.796800] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13986acf-4610-4448-b2b8-b1374fe89b61 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.827369] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee2f4ad-1fc4-40ab-8441-f9a0ec7182a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.835145] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb2f073-3734-4207-bc13-6fea829d366d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.848099] env[68437]: DEBUG nova.compute.provider_tree [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.851140] env[68437]: DEBUG nova.compute.manager [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-changed-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1207.851327] env[68437]: DEBUG nova.compute.manager [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing instance network info cache due to event network-changed-5f058ce1-be0f-4b97-be84-11302a668781. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1207.851898] env[68437]: DEBUG oslo_concurrency.lockutils [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] Acquiring lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.851898] env[68437]: DEBUG oslo_concurrency.lockutils [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] Acquired lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.851898] env[68437]: DEBUG nova.network.neutron [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Refreshing network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1207.920074] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1207.920656] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87db95b6-0a67-4adf-a9be-87a93b15a79b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.927844] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1207.927844] env[68437]: value = "task-2945051" [ 1207.927844] env[68437]: _type = "Task" [ 1207.927844] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.941297] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945051, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.233039] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.233039] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Processing image fa463cf5-5b17-4cb0-9385-6d7d061c3876 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.233382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.233465] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.233646] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.233899] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12c4a953-f906-4089-b3ae-e9e531c219e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.242155] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.242336] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1208.243123] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f711747-ccb1-4b7e-8657-23d52739be72 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.247987] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1208.247987] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52be61f5-759a-9848-326b-9cbb49b7d158" [ 1208.247987] env[68437]: _type = "Task" [ 1208.247987] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.255702] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52be61f5-759a-9848-326b-9cbb49b7d158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.354850] env[68437]: DEBUG nova.scheduler.client.report [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1208.440753] env[68437]: DEBUG oslo_vmware.api [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945051, 'name': PowerOnVM_Task, 'duration_secs': 0.411843} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.440961] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.441179] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d411a895-d1c4-409a-8628-02b36ad545f8 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance '8ccd7176-55c0-4118-a07e-3c4bdbba9795' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.571359] env[68437]: DEBUG nova.network.neutron [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updated VIF entry in instance network info cache for port 5f058ce1-be0f-4b97-be84-11302a668781. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1208.571768] env[68437]: DEBUG nova.network.neutron [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [{"id": "5f058ce1-be0f-4b97-be84-11302a668781", "address": "fa:16:3e:4d:8c:0f", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.235", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f058ce1-be", "ovs_interfaceid": "5f058ce1-be0f-4b97-be84-11302a668781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.633767] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1208.659297] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.659553] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.659709] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.659888] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.660046] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.660200] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.660412] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.660572] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.660738] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 
tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.660982] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.661189] env[68437]: DEBUG nova.virt.hardware [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.662048] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06e94e1-77c1-4f44-adb7-4bcd90b24920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.669878] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a6e1fb-52e0-416a-a1c6-6bce8ae2dc41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.759277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1208.759277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Fetch image to [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573/OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1208.759277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Downloading stream optimized image fa463cf5-5b17-4cb0-9385-6d7d061c3876 to [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573/OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573.vmdk on the data store datastore1 as vApp {{(pid=68437) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1208.759277] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Downloading image file data fa463cf5-5b17-4cb0-9385-6d7d061c3876 to the ESX as VM named 'OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573' {{(pid=68437) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1208.835311] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HttpNfcLease 
lease for vApp import into resource pool: (val){ [ 1208.835311] env[68437]: value = "resgroup-9" [ 1208.835311] env[68437]: _type = "ResourcePool" [ 1208.835311] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1208.835671] env[68437]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-628f9134-55c0-410a-b76f-fdd8f2360609 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.858719] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease: (returnval){ [ 1208.858719] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1208.858719] env[68437]: _type = "HttpNfcLease" [ 1208.858719] env[68437]: } obtained for vApp import into resource pool (val){ [ 1208.858719] env[68437]: value = "resgroup-9" [ 1208.858719] env[68437]: _type = "ResourcePool" [ 1208.858719] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1208.858969] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the lease: (returnval){ [ 1208.858969] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1208.858969] env[68437]: _type = "HttpNfcLease" [ 1208.858969] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1208.862603] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.863116] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1208.869100] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1208.869100] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1208.869100] env[68437]: _type = "HttpNfcLease" [ 1208.869100] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1208.896665] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Successfully updated port: bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1209.075189] env[68437]: DEBUG oslo_concurrency.lockutils [req-94d66d0a-abbb-4185-a746-21d5f6369c42 req-201b8b49-0022-460a-b6f7-e7f1148402a6 service nova] Releasing lock "refresh_cache-aff861ed-e792-480a-811e-c157c0606d08" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.323342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.323718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.323767] env[68437]: DEBUG nova.compute.manager [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Going to confirm migration 5 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1209.366241] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1209.366241] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1209.366241] env[68437]: _type = "HttpNfcLease" [ 1209.366241] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1209.368527] env[68437]: DEBUG nova.compute.utils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1209.369797] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1209.369966] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1209.399351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.399535] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.399644] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1209.405970] env[68437]: DEBUG nova.policy [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7605d44a5b5448a3966872b4f524d13c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40d8becefc85431b9723c72aa09d152b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1209.666642] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Successfully created port: bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1209.867745] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1209.867745] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1209.867745] env[68437]: _type = "HttpNfcLease" [ 1209.867745] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1209.868281] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1209.868281] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5217afc6-71c5-d590-d9c9-227479120e3e" [ 1209.868281] env[68437]: _type = "HttpNfcLease" [ 1209.868281] env[68437]: }. 
{{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1209.868764] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8fb4b8-cd52-4bb4-bb69-0ce45b3bde6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.872559] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1209.877548] env[68437]: DEBUG nova.compute.manager [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Received event network-vif-plugged-bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1209.877548] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Acquiring lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.877548] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.877548] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.877548] env[68437]: DEBUG nova.compute.manager [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] No waiting events found dispatching network-vif-plugged-bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1209.877548] env[68437]: WARNING nova.compute.manager [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Received unexpected event network-vif-plugged-bf3637c1-c320-4b68-874d-64e97b9d845e for instance with vm_state building and task_state spawning. 
[ 1209.877548] env[68437]: DEBUG nova.compute.manager [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Received event network-changed-bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1209.877548] env[68437]: DEBUG nova.compute.manager [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Refreshing instance network info cache due to event network-changed-bf3637c1-c320-4b68-874d-64e97b9d845e. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1209.877548] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Acquiring lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.882837] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1209.883093] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk. 
{{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1209.886589] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.886748] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquired lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.886919] env[68437]: DEBUG nova.network.neutron [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1209.887108] env[68437]: DEBUG nova.objects.instance [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'info_cache' on Instance uuid 00b76648-d27b-4002-80cb-366e64c32ecc {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.950486] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b80261e1-656a-4bde-a657-33751448afb8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.981656] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1210.152425] env[68437]: DEBUG nova.network.neutron [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Updating instance_info_cache with network_info: [{"id": "bf3637c1-c320-4b68-874d-64e97b9d845e", "address": "fa:16:3e:3a:ca:3b", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf3637c1-c3", "ovs_interfaceid": "bf3637c1-c320-4b68-874d-64e97b9d845e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.226812] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.656423] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.656423] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Instance network_info: |[{"id": "bf3637c1-c320-4b68-874d-64e97b9d845e", "address": "fa:16:3e:3a:ca:3b", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf3637c1-c3", "ovs_interfaceid": 
"bf3637c1-c320-4b68-874d-64e97b9d845e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1210.656423] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Acquired lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.656851] env[68437]: DEBUG nova.network.neutron [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Refreshing network info cache for port bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1210.657992] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:ca:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf3637c1-c320-4b68-874d-64e97b9d845e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1210.666984] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.676486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.676772] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.676992] env[68437]: DEBUG nova.compute.manager [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Going to confirm migration 6 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1210.678315] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1210.680853] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c25d18f-6d4b-48e8-a116-59661e378377 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.708043] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1210.708043] env[68437]: value = "task-2945053" [ 1210.708043] env[68437]: _type = "Task" [ 1210.708043] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.717901] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945053, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.733015] env[68437]: DEBUG nova.network.neutron [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [{"id": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "address": "fa:16:3e:45:d4:06", "network": {"id": "98068b07-0d1f-41bc-9ea8-7f47311d2ffb", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-908726496-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38ad479949b24307b08e16fdb821c76f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89470f7f-1c8b-4c83-92b5-6f73a77c520f", "external-id": "nsx-vlan-transportzone-929", "segmentation_id": 929, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f16f068-3b", "ovs_interfaceid": "4f16f068-3b53-4a8d-a82b-21114eb371ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.736925] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.887088] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1210.919704] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1210.920070] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1210.920269] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1210.920474] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1210.920640] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1210.920791] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1210.921149] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1210.921357] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1210.921564] env[68437]: DEBUG 
nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1210.921739] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1210.921916] env[68437]: DEBUG nova.virt.hardware [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1210.922868] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c733dc-b3c7-4555-a4b0-c3e23bcc053f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.931554] env[68437]: DEBUG nova.network.neutron [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Updated VIF entry in instance network info cache for port bf3637c1-c320-4b68-874d-64e97b9d845e. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1210.931928] env[68437]: DEBUG nova.network.neutron [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Updating instance_info_cache with network_info: [{"id": "bf3637c1-c320-4b68-874d-64e97b9d845e", "address": "fa:16:3e:3a:ca:3b", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf3637c1-c3", "ovs_interfaceid": "bf3637c1-c320-4b68-874d-64e97b9d845e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.938675] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfc3155-478c-448d-822b-3b9d00adb347 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.143315] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Successfully updated port: bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1211.219062] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945053, 'name': CreateVM_Task, 'duration_secs': 0.436015} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.220816] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1211.221600] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.221773] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.222176] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1211.222450] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d81154-8d52-43ea-8fac-858af0d43069 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.227489] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1211.227489] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3a5aa-e716-0061-163f-b2203e53229a" [ 1211.227489] env[68437]: _type = "Task" [ 1211.227489] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.236843] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Releasing lock "refresh_cache-00b76648-d27b-4002-80cb-366e64c32ecc" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.237102] env[68437]: DEBUG nova.objects.instance [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lazy-loading 'migration_context' on Instance uuid 00b76648-d27b-4002-80cb-366e64c32ecc {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.238907] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.239093] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.239266] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.239428] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1211.239706] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3a5aa-e716-0061-163f-b2203e53229a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.240789] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.240983] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.241206] env[68437]: DEBUG nova.network.neutron [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1211.241392] env[68437]: DEBUG nova.objects.instance [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'info_cache' on Instance uuid 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1211.245880] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef0c8c0-682d-4a91-97e2-e517627b8dc7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.254397] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c0bc10-e0ae-4d9e-bf7d-aceb37b28a07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.270776] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d2185d-4873-446e-a330-dc84e87cc484 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.277482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a36754-eb01-4250-a4cf-e9425e08b2ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.309153] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179538MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1211.309356] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.309527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.365362] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1211.365621] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1211.366755] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7c8d98-2b75-4ae1-864b-dca41b29100b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.373864] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1211.374112] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1211.374611] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f890f96a-f899-40c3-811f-8c3f1ef5c80e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.438205] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a01e848-2a2c-46c4-b790-8b80434df466 req-bcc129d2-e634-4964-8cf5-a7d263f3e9b9 service nova] Releasing lock "refresh_cache-55076395-fd1d-48a7-ab85-fe0eb03afa19" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.557411] env[68437]: DEBUG oslo_vmware.rw_handles [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52869f54-30dc-5b90-da77-845d9abdeb8a/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1211.557641] env[68437]: INFO nova.virt.vmwareapi.images [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Downloaded image file data fa463cf5-5b17-4cb0-9385-6d7d061c3876 [ 1211.558558] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d520e8-3d4c-4bf4-8afc-0f3602793b29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.574158] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbf4e97d-6da8-4936-a4c4-539183ac3574 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.606593] env[68437]: INFO nova.virt.vmwareapi.images [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] The imported VM was unregistered [ 1211.609105] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1211.609369] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1211.609621] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-414874fb-fe7d-4d30-bba5-542ce95e4615 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.642089] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1211.642335] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573/OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573.vmdk to [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk. 
{{(pid=68437) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1211.642611] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-dadb2bd0-7299-4e09-8279-d1939995e76e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.649099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.649275] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.649463] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1211.650575] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1211.650575] env[68437]: value = "task-2945055" [ 1211.650575] env[68437]: _type = "Task" [ 1211.650575] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.658329] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.740426] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d3a5aa-e716-0061-163f-b2203e53229a, 'name': SearchDatastore_Task, 'duration_secs': 0.017257} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.740744] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.741029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1211.741281] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.741432] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.741613] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1211.741861] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9799c3a1-bf6b-4f28-8d9f-6d3c212f77f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.746447] env[68437]: DEBUG nova.objects.base [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Object Instance<00b76648-d27b-4002-80cb-366e64c32ecc> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1211.747253] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ce84e1-018f-4e92-a10a-dc4ecd18fe21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.767847] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-863c39de-0528-408d-a147-9f768187dc5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.770025] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1211.770212] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1211.771125] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abcdb3bc-0ddd-4bc2-8994-2bcfeced75ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.775988] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1211.775988] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52960bdb-ff8c-df92-3bab-c42547e18308" [ 1211.775988] env[68437]: _type = "Task" [ 1211.775988] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.777261] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1211.777261] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0043c-497f-1783-d62c-d20fc62acc34" [ 1211.777261] env[68437]: _type = "Task" [ 1211.777261] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.787354] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0043c-497f-1783-d62c-d20fc62acc34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.790283] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52960bdb-ff8c-df92-3bab-c42547e18308, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.898878] env[68437]: DEBUG nova.compute.manager [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Received event network-vif-plugged-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1211.899023] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Acquiring lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.899245] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.899453] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.899640] env[68437]: DEBUG nova.compute.manager [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] No waiting events found dispatching network-vif-plugged-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1211.899828] env[68437]: WARNING nova.compute.manager [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Received unexpected event network-vif-plugged-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 for instance with vm_state building and task_state spawning. [ 1211.900019] env[68437]: DEBUG nova.compute.manager [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Received event network-changed-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1211.900363] env[68437]: DEBUG nova.compute.manager [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Refreshing instance network info cache due to event network-changed-bce31050-7ee6-4cbb-8b64-b2bf9be3c649. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1211.900557] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Acquiring lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.163125] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.185469] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1212.290639] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52960bdb-ff8c-df92-3bab-c42547e18308, 'name': SearchDatastore_Task, 'duration_secs': 0.014425} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.296227] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.296369] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c0043c-497f-1783-d62c-d20fc62acc34, 'name': SearchDatastore_Task, 'duration_secs': 0.061397} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.297409] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-374a7245-8408-4ecc-bda6-2706a77cdf00 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.302657] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1212.302657] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52be2764-9fea-9160-521f-f2680cfbff2b" [ 1212.302657] env[68437]: _type = "Task" [ 1212.302657] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.311438] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52be2764-9fea-9160-521f-f2680cfbff2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.320263] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Applying migration context for instance 00b76648-d27b-4002-80cb-366e64c32ecc as it has an incoming, in-progress migration 91867768-22b8-454f-a726-07dda53e3078. Migration status is finished {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1212.320477] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Applying migration context for instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 as it has an incoming, in-progress migration c451d302-0db3-4d94-a101-fdcd13f3e017. Migration status is confirming {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1212.321815] env[68437]: INFO nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating resource usage from migration 91867768-22b8-454f-a726-07dda53e3078 [ 1212.322149] env[68437]: INFO nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating resource usage from migration c451d302-0db3-4d94-a101-fdcd13f3e017 [ 1212.345639] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.345794] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e2143e07-8c8d-4008-bb73-29aae91baee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.345967] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 191b441c-2c9f-48f9-b83a-d539722e6375 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346046] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e81e633d-34a6-443d-a2fe-95e6d8afa552 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346168] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8554a78c-c2d7-459d-a295-121da777dfd4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346284] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 7a05d783-afac-43a1-a715-c83b42c990c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346398] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance aff861ed-e792-480a-811e-c157c0606d08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346512] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Migration 91867768-22b8-454f-a726-07dda53e3078 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1212.346625] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 00b76648-d27b-4002-80cb-366e64c32ecc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346736] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Migration c451d302-0db3-4d94-a101-fdcd13f3e017 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1212.346848] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.346958] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 55076395-fd1d-48a7-ab85-fe0eb03afa19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.347083] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ede62837-4ff5-44be-a015-9ea06b9126a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1212.347453] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1212.347697] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3136MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1212.355357] env[68437]: DEBUG nova.network.neutron [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updating instance_info_cache with network_info: [{"id": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "address": "fa:16:3e:61:57:10", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbce31050-7e", "ovs_interfaceid": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.492153] env[68437]: DEBUG nova.network.neutron [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [{"id": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "address": "fa:16:3e:cc:6f:88", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": 
"floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ec75d03-3e", "ovs_interfaceid": "4ec75d03-3ee1-480d-ab6a-acc211fd6bae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.511934] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57ee9d9-4469-4e30-9d68-898dcfbb0a29 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.519657] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c538a5-2d38-4a49-80e5-fd0468b1815d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.550483] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec6d85a-8118-40a9-9647-86eb857e51e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.557545] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da93ffd-ce1e-4122-8866-5fcb3a22c434 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.570960] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.665404] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.814788] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52be2764-9fea-9160-521f-f2680cfbff2b, 'name': SearchDatastore_Task, 'duration_secs': 0.044842} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.815079] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.815273] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 55076395-fd1d-48a7-ab85-fe0eb03afa19/55076395-fd1d-48a7-ab85-fe0eb03afa19.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1212.815620] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3540c85-43e9-4cac-b1b2-8143505c5715 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.824493] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1212.824493] env[68437]: value = "task-2945056" [ 1212.824493] env[68437]: _type = "Task" [ 1212.824493] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.834437] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.858640] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.859039] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Instance network_info: |[{"id": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "address": "fa:16:3e:61:57:10", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbce31050-7e", "ovs_interfaceid": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1212.859401] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Acquired lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.859577] env[68437]: DEBUG nova.network.neutron [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Refreshing network info cache for port bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1212.860998] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:57:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bce31050-7ee6-4cbb-8b64-b2bf9be3c649', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1212.868820] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 
tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1212.871633] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1212.872238] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-feb91493-0f68-41b6-b78a-8386936431ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.892984] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1212.892984] env[68437]: value = "task-2945057" [ 1212.892984] env[68437]: _type = "Task" [ 1212.892984] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.903768] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945057, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.995038] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-8ccd7176-55c0-4118-a07e-3c4bdbba9795" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.995337] env[68437]: DEBUG nova.objects.instance [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'migration_context' on Instance uuid 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.074019] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.101879] env[68437]: DEBUG nova.network.neutron [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updated VIF entry in instance network info cache for port bce31050-7ee6-4cbb-8b64-b2bf9be3c649. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1213.102291] env[68437]: DEBUG nova.network.neutron [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updating instance_info_cache with network_info: [{"id": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "address": "fa:16:3e:61:57:10", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbce31050-7e", "ovs_interfaceid": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.168329] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.336342] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.405382] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945057, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.500595] env[68437]: DEBUG nova.objects.base [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Object Instance<8ccd7176-55c0-4118-a07e-3c4bdbba9795> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1213.501836] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf79eee3-1eac-4ce5-a3fd-0651942955b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.523380] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c8ac44-64a1-421f-898c-bea2d8f561fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.530225] env[68437]: DEBUG oslo_vmware.api [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1213.530225] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521053ac-d859-5339-4a5d-c4bcc30e5b87" [ 1213.530225] env[68437]: _type = "Task" [ 1213.530225] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.540533] env[68437]: DEBUG oslo_vmware.api [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521053ac-d859-5339-4a5d-c4bcc30e5b87, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.579701] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1213.579986] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.270s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.580474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.284s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.581803] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.582008] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances with incomplete migration {{(pid=68437) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1213.605453] env[68437]: DEBUG oslo_concurrency.lockutils [req-1c5cff84-a75b-48bf-996b-8d56213aea46 req-c9fbd84f-0848-4338-8eba-5d841e5afa6d service nova] Releasing lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.668316] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.836845] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.906082] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945057, 'name': CreateVM_Task, 'duration_secs': 0.846819} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.906323] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1213.907101] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.907278] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.907610] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1213.907900] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcef4c12-d422-4fe5-830b-1ca3e0cbdd9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.914412] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1213.914412] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d0c6fc-0f7f-92ca-14ed-b171eaf7628b" [ 1213.914412] env[68437]: _type = "Task" [ 1213.914412] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.924801] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d0c6fc-0f7f-92ca-14ed-b171eaf7628b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.042648] env[68437]: DEBUG oslo_vmware.api [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521053ac-d859-5339-4a5d-c4bcc30e5b87, 'name': SearchDatastore_Task, 'duration_secs': 0.041705} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.042960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.175044] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.339335] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.427076] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d0c6fc-0f7f-92ca-14ed-b171eaf7628b, 'name': SearchDatastore_Task, 'duration_secs': 0.082117} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.429882] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.430137] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1214.430989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.430989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.430989] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1214.431261] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-973c7ef1-a1e8-4bbe-a81c-a14ba57d4655 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.443428] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a54680-f546-4c47-a5ac-4be4dd98546d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.449272] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1214.449466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1214.451917] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e482f4c-486f-4c93-9a8d-418fb2c9227e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.455426] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfdb4ca-c873-4a23-99bd-d543861d4e0d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.462962] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1214.462962] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c7f7-3917-1b9e-fcfb-e8805b9b0038" [ 1214.462962] env[68437]: _type = "Task" [ 1214.462962] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.490573] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcb9e7f-7ccd-40fa-bd91-02801e6b0958 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.499536] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c7f7-3917-1b9e-fcfb-e8805b9b0038, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.502859] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca3ee42-5e9b-4860-af8a-258903201757 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.518015] env[68437]: DEBUG nova.compute.provider_tree [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.582746] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.583078] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.669080] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.839813] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.996225] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c7f7-3917-1b9e-fcfb-e8805b9b0038, 'name': SearchDatastore_Task, 'duration_secs': 0.077543} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.996844] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd78a21-ab88-4f42-b54e-7db466e9adba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.001698] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1215.001698] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4941-a0e9-2929-06f2-8cd396707051" [ 1215.001698] env[68437]: _type = "Task" [ 1215.001698] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.008676] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4941-a0e9-2929-06f2-8cd396707051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.020635] env[68437]: DEBUG nova.scheduler.client.report [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.167023] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.230794] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.231021] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1215.340518] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945056, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.279738} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.340779] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 55076395-fd1d-48a7-ab85-fe0eb03afa19/55076395-fd1d-48a7-ab85-fe0eb03afa19.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1215.341043] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1215.341294] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1293e644-8918-447e-99cc-dca1f693970a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.347686] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1215.347686] env[68437]: value = "task-2945058" [ 1215.347686] env[68437]: _type = "Task" [ 1215.347686] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.355619] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.513875] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521b4941-a0e9-2929-06f2-8cd396707051, 'name': SearchDatastore_Task, 'duration_secs': 0.095809} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.513875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.514148] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ede62837-4ff5-44be-a015-9ea06b9126a5/ede62837-4ff5-44be-a015-9ea06b9126a5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1215.514353] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d843b0ac-b9b1-4c7e-ba27-0f898c4fff9b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.520116] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1215.520116] env[68437]: value = "task-2945059" [ 1215.520116] env[68437]: _type = "Task" [ 1215.520116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.530917] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.668184] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945055, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.560582} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.668469] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573/OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573.vmdk to [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk. 
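Aside: the long runs of _poll_task entries above (MoveVirtualDisk_Task, CopyVirtualDisk_Task, CreateVM_Task, SearchDatastore_Task) all reflect the same poll-until-terminal-state loop that oslo_vmware.api drives via wait_for_task. A minimal self-contained sketch of that pattern follows; it does not use the real oslo.vmware API, and fetch_task_info, the state names, and the intervals are illustrative stand-ins only.

    import time

    # Illustrative non-terminal states, mirroring the vSphere task lifecycle
    # (queued/running -> success or error). Hypothetical values, not the SDK's.
    RUNNING_STATES = {"queued", "running"}


    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state.

        fetch_task_info: zero-argument callable returning a dict such as
            {"state": "running", "progress": 51, "error": None}
        Returns the final task info on success, raises on error or timeout.
        """
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info()
            if info["state"] not in RUNNING_STATES:
                if info["state"] == "success":
                    return info
                raise RuntimeError("task failed: %s" % info.get("error"))
            # Lines like "CopyVirtualDisk_Task progress is 51%" come from
            # logging info["progress"] on each poll iteration.
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete within %ss" % timeout)
            time.sleep(poll_interval)
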
[ 1215.668658] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Cleaning up location [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1215.668821] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d81d5859-db13-450a-9693-dc98a8a16573 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1215.669089] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15deb28b-4ff7-43fd-a363-9e25029e82fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.676101] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1215.676101] env[68437]: value = "task-2945060" [ 1215.676101] env[68437]: _type = "Task" [ 1215.676101] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.684371] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.746959] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] There are 58 instances to clean {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1215.746959] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 923133d9-3a2d-4309-83a7-ab59315ee4dc] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1215.857931] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065385} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.858514] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1215.859038] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f845156-3252-466d-ad7b-1b36780fe40c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.882895] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 55076395-fd1d-48a7-ab85-fe0eb03afa19/55076395-fd1d-48a7-ab85-fe0eb03afa19.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1215.882895] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8176797a-c96c-47c8-af2a-948b5729265b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.903848] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1215.903848] env[68437]: value = "task-2945061" [ 1215.903848] env[68437]: _type = "Task" [ 1215.903848] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.911552] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945061, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.029895] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945059, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.034050] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.453s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.036794] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.994s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.186213] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0669} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.186515] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1216.186691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.186949] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk to [datastore1] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1216.187238] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be7d23cb-1526-4367-8546-d96387278f7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.196014] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1216.196014] env[68437]: value = "task-2945062" [ 1216.196014] env[68437]: _type = "Task" [ 1216.196014] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.205458] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.249716] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 96c7aa3f-9098-49fe-8f5f-c3c45110fb4f] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1216.415647] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945061, 'name': ReconfigVM_Task, 'duration_secs': 0.437838} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.415919] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 55076395-fd1d-48a7-ab85-fe0eb03afa19/55076395-fd1d-48a7-ab85-fe0eb03afa19.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1216.416609] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-826b4807-2779-4651-9908-995c5ba250c0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.423061] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1216.423061] env[68437]: value = "task-2945063" [ 1216.423061] env[68437]: _type = "Task" [ 1216.423061] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.430304] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945063, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.531657] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.834095} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.531935] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] ede62837-4ff5-44be-a015-9ea06b9126a5/ede62837-4ff5-44be-a015-9ea06b9126a5.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1216.532172] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1216.532439] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3c51ef3-d8da-4afa-b7f0-09e4c7b4a7bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.542223] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1216.542223] env[68437]: value = "task-2945064" [ 1216.542223] env[68437]: _type = "Task" [ 1216.542223] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.557512] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945064, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.610420] env[68437]: INFO nova.scheduler.client.report [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocation for migration 91867768-22b8-454f-a726-07dda53e3078 [ 1216.708587] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.730040] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c24b5c-8394-4748-a369-537292d894d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.741066] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c139ca4a-7c1c-4fdc-a2a5-f4622b17f428 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.775361] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6fabc758-0d56-4adb-a54e-b9c8798a0151] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1216.778704] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19670128-b451-486f-9209-2147fd542d06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.788641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bf6bc2-580a-47b0-859e-3103bd70085a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.805164] env[68437]: DEBUG nova.compute.provider_tree [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.935526] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945063, 'name': Rename_Task, 'duration_secs': 0.220616} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.935884] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1216.936174] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d23ebee-c8fe-4b80-a46a-43aa1bf26566 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.945297] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1216.945297] env[68437]: value = "task-2945065" [ 1216.945297] env[68437]: _type = "Task" [ 1216.945297] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.955803] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.056215] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945064, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093359} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.056498] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1217.057455] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f18173-aea0-42b0-87c0-61de463dd65e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.083343] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] ede62837-4ff5-44be-a015-9ea06b9126a5/ede62837-4ff5-44be-a015-9ea06b9126a5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1217.083712] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09b0f8cb-065f-4fe5-98e1-7d13c7558eed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.106091] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1217.106091] env[68437]: value = "task-2945066" [ 1217.106091] env[68437]: _type = "Task" [ 1217.106091] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.118062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.794s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.119223] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945066, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.208776] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.278991] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: b4257b79-2723-43fd-b64f-74104802e048] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1217.327447] env[68437]: ERROR nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [req-263d8f1a-96da-4efe-9839-0b88d4160b02] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-263d8f1a-96da-4efe-9839-0b88d4160b02"}]} [ 1217.349616] env[68437]: DEBUG nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1217.363435] env[68437]: DEBUG nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1217.363666] env[68437]: DEBUG nova.compute.provider_tree [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1217.376709] env[68437]: DEBUG nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1217.397571] env[68437]: DEBUG nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1217.460770] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945065, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.560693] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9ce98b-0e8c-41a1-a622-81692c1c1f12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.571610] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98a2b6d-7c9d-44e9-bc44-17d06b349bd5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.609746] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50339042-4813-45c2-b800-c3c12b216645 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.619556] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945066, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.623589] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdcacae-514d-420c-8aea-88a6146268a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.640687] env[68437]: DEBUG nova.compute.provider_tree [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1217.708472] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.783200] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 76ed714d-7ffe-4a64-ae78-bab76ba1504a] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1217.959238] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945065, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.120032] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945066, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.173425] env[68437]: DEBUG nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1218.173706] env[68437]: DEBUG nova.compute.provider_tree [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 140 to 141 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1218.173951] env[68437]: DEBUG nova.compute.provider_tree [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1218.209929] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.286862] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 9b98ff24-e9d1-4754-89d2-ee2daa54ad47] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1218.349173] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.349436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.349649] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.349856] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.350040] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.353404] env[68437]: INFO nova.compute.manager [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Terminating instance [ 1218.459665] env[68437]: DEBUG oslo_vmware.api [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945065, 'name': PowerOnVM_Task, 'duration_secs': 1.351626} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.459961] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1218.460184] env[68437]: INFO nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Took 9.83 seconds to spawn the instance on the hypervisor. [ 1218.460367] env[68437]: DEBUG nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1218.461220] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e613e7b-5815-45e0-9b43-db495d9f9445 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.618062] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945066, 'name': ReconfigVM_Task, 'duration_secs': 1.302697} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.618397] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Reconfigured VM instance instance-0000006f to attach disk [datastore1] ede62837-4ff5-44be-a015-9ea06b9126a5/ede62837-4ff5-44be-a015-9ea06b9126a5.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1218.618974] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57e9dbf0-50b8-4271-8422-ef1fdfdea638 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.624966] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1218.624966] env[68437]: value = "task-2945067" [ 1218.624966] env[68437]: _type = "Task" [ 1218.624966] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.636049] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945067, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.707704] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945062, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.422451} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.707948] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fa463cf5-5b17-4cb0-9385-6d7d061c3876/fa463cf5-5b17-4cb0-9385-6d7d061c3876.vmdk to [datastore1] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1218.708705] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca28f620-6599-49f8-af76-5afca80d0104 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.729666] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1218.730116] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e55127b0-ed24-4a34-a2cf-220e28e87abb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.750078] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1218.750078] env[68437]: value = "task-2945068" [ 1218.750078] env[68437]: _type = "Task" [ 1218.750078] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.759353] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945068, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.790512] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 4fb5a384-0792-40df-b361-0784397a897f] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1218.857415] env[68437]: DEBUG nova.compute.manager [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1218.857715] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.858588] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60951c3c-9346-4eab-88e0-262778473147 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.866735] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.866958] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29c96e3b-f600-4178-a2fe-a78ad706198b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.872236] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1218.872236] env[68437]: value = "task-2945069" [ 1218.872236] env[68437]: _type = "Task" [ 1218.872236] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.880950] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.980928] env[68437]: INFO nova.compute.manager [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Took 18.96 seconds to build instance. [ 1219.135356] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945067, 'name': Rename_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.184794] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.148s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.259475] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945068, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.293512] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: cdb5b8d0-03ab-4020-a9aa-00688f7aef8e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1219.381821] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945069, 'name': PowerOffVM_Task, 'duration_secs': 0.302765} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.382252] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1219.382344] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1219.382542] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f76accb2-2b8f-4992-b8d3-dc13fd482a93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.482618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c630e739-0151-4e2f-a256-3f1accc056c8 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.466s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.635587] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945067, 'name': Rename_Task, 'duration_secs': 0.594104} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.635875] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1219.636164] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f678cf48-372d-4dbd-a1e7-7a02b7d032b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.642201] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1219.642201] env[68437]: value = "task-2945071" [ 1219.642201] env[68437]: _type = "Task" [ 1219.642201] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.651662] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.748759] env[68437]: INFO nova.scheduler.client.report [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted allocation for migration c451d302-0db3-4d94-a101-fdcd13f3e017 [ 1219.763494] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945068, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.796450] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ea09a88a-d426-4af4-aa07-945ccfbf2a24] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1219.950360] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.950585] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.950791] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.950993] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.951170] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.953232] env[68437]: INFO nova.compute.manager [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Terminating instance [ 1220.010655] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.010948] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Deleting contents of the VM from datastore datastore1 {{(pid=68437) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.011183] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleting the datastore file [datastore1] 00b76648-d27b-4002-80cb-366e64c32ecc {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.011515] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a50d8954-d614-4a08-b8ee-9d34941a4dca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.020427] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for the task: (returnval){ [ 1220.020427] env[68437]: value = "task-2945072" [ 1220.020427] env[68437]: _type = "Task" [ 1220.020427] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.028247] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945072, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.152402] env[68437]: DEBUG oslo_vmware.api [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945071, 'name': PowerOnVM_Task, 'duration_secs': 0.47525} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.152402] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1220.152402] env[68437]: INFO nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Took 9.27 seconds to spawn the instance on the hypervisor. 
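The task-2945071 entries just above trace one complete oslo.vmware wait cycle: Nova submits PowerOnVM_Task, wait_for_task repeatedly polls it ("progress is 0%." ... "completed successfully"), and only then does vm_util log "Powered on the VM". The same pattern appears throughout this section for ReconfigVM_Task, CopyVirtualDisk_Task, Rename_Task, PowerOffVM_Task and DeleteDatastoreFile_Task. Below is a minimal, self-contained Python sketch of that polling pattern; FakeVCenterSession and TaskInfo are invented stand-ins for illustration and do not reproduce the real oslo_vmware.api.VMwareAPISession.wait_for_task implementation.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str            # "running", "success" or "error"
        progress: int         # 0-100, as in the "progress is N%." log lines
        result: object = None
        error: str = ""

    class FakeVCenterSession:
        """Stand-in for a vCenter session; replays canned task states."""
        def __init__(self, steps):
            self._steps = iter(steps)

        def get_task_info(self, task_id):
            return next(self._steps)

    def wait_for_task(session, task_id, poll_interval=0.5):
        """Poll a task until it reaches a terminal state, logging progress."""
        while True:
            info = session.get_task_info(task_id)
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)

    if __name__ == "__main__":
        # Replays roughly the progression seen for a PowerOnVM_Task above.
        session = FakeVCenterSession([
            TaskInfo("running", 0),
            TaskInfo("running", 78),
            TaskInfo("running", 88),
            TaskInfo("success", 100, result="powered-on"),
        ])
        print(wait_for_task(session, "task-2945071", poll_interval=0.01))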
[ 1220.153280] env[68437]: DEBUG nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1220.153464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2cc166c-fb1e-4a49-94e7-e1bc71bf0ed3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.258954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-20fb44f1-0c47-4a5e-bcf6-50c3272f0468 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.582s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.263927] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945068, 'name': ReconfigVM_Task, 'duration_secs': 1.051848} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.264456] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfigured VM instance instance-00000062 to attach disk [datastore1] aff861ed-e792-480a-811e-c157c0606d08/aff861ed-e792-480a-811e-c157c0606d08.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1220.265619] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'device_type': 'disk', 'guest_format': None, 'encrypted': False, 'encryption_options': None, 'encryption_secret_uuid': None, 'boot_index': 0, 'size': 0, 'encryption_format': None, 'disk_bus': None, 'image_id': 'a272f526-6b8d-4a29-bd06-cd29ab5fabbe'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '66ea7ad0-a60c-43ab-9a4e-91ad36de2f80', 'device_type': None, 'mount_device': '/dev/sdb', 'boot_index': None, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591140', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'name': 'volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'aff861ed-e792-480a-811e-c157c0606d08', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'serial': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2'}, 'delete_on_termination': False, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68437) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1220.265874] 
env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Volume attach. Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1220.266120] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591140', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'name': 'volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'aff861ed-e792-480a-811e-c157c0606d08', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'serial': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1220.267148] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5df4690-cb82-4f35-8f49-4e8410d70a09 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.284986] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5319f49e-4873-4ec8-bd5d-03e52f881f77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.302939] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 9d54d4b6-9b92-4a24-9582-475108bf2710] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1220.312523] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2/volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1220.313230] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa6cd121-26f0-4239-a5da-ee5e2057b145 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.332546] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1220.332546] env[68437]: value = "task-2945073" [ 1220.332546] env[68437]: _type = "Task" [ 1220.332546] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.341648] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945073, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.456986] env[68437]: DEBUG nova.compute.manager [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1220.457297] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1220.458713] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c585072-3176-41ab-bbde-bb388abf5528 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.467922] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.468223] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd8834ad-c1bc-4acc-b38f-b31609460b44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.475441] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1220.475441] env[68437]: value = "task-2945074" [ 1220.475441] env[68437]: _type = "Task" [ 1220.475441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.487433] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945074, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.531363] env[68437]: DEBUG oslo_vmware.api [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Task: {'id': task-2945072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173585} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.531692] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.531923] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.532242] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.532406] env[68437]: INFO nova.compute.manager [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1220.532714] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1220.532938] env[68437]: DEBUG nova.compute.manager [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.533071] env[68437]: DEBUG nova.network.neutron [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1220.672322] env[68437]: INFO nova.compute.manager [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Took 17.22 seconds to build instance. [ 1220.814199] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 4abf1477-2f0e-4a13-884a-c19420b3e435] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1220.843581] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945073, 'name': ReconfigVM_Task, 'duration_secs': 0.313816} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.844422] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2/volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1220.850382] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8695f3a-6692-42d9-9955-a183b94dc4f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.866445] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1220.866445] env[68437]: value = "task-2945075" [ 1220.866445] env[68437]: _type = "Task" [ 1220.866445] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.876123] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945075, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.965868] env[68437]: DEBUG nova.compute.manager [req-d078d846-cf57-4dac-9979-3afe8a9af9dc req-ee44c725-ffc1-41a7-a409-799dd0c99d46 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Received event network-vif-deleted-4f16f068-3b53-4a8d-a82b-21114eb371ea {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1220.966132] env[68437]: INFO nova.compute.manager [req-d078d846-cf57-4dac-9979-3afe8a9af9dc req-ee44c725-ffc1-41a7-a409-799dd0c99d46 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Neutron deleted interface 4f16f068-3b53-4a8d-a82b-21114eb371ea; detaching it from the instance and deleting it from the info cache [ 1220.966327] env[68437]: DEBUG nova.network.neutron [req-d078d846-cf57-4dac-9979-3afe8a9af9dc req-ee44c725-ffc1-41a7-a409-799dd0c99d46 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.985949] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945074, 'name': PowerOffVM_Task, 'duration_secs': 0.245163} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.986247] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1220.986427] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1220.986688] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2dafaab6-8227-478e-afc1-7e7b7ea52f74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.995036] env[68437]: DEBUG nova.compute.manager [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Received event network-changed-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1220.995036] env[68437]: DEBUG nova.compute.manager [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Refreshing instance network info cache due to event network-changed-bce31050-7ee6-4cbb-8b64-b2bf9be3c649. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1220.995036] env[68437]: DEBUG oslo_concurrency.lockutils [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] Acquiring lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.995229] env[68437]: DEBUG oslo_concurrency.lockutils [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] Acquired lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.995267] env[68437]: DEBUG nova.network.neutron [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Refreshing network info cache for port bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1221.062275] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.062508] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.062663] 
env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore1] 55076395-fd1d-48a7-ab85-fe0eb03afa19 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.063348] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ec23652-9dd8-4b75-9b3b-3b307953665a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.070324] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1221.070324] env[68437]: value = "task-2945077" [ 1221.070324] env[68437]: _type = "Task" [ 1221.070324] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.079307] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.175122] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5af8a417-6811-4710-bce9-0c56f9e6909f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.732s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.291533] env[68437]: DEBUG nova.network.neutron [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.317302] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 8d87308a-5583-4785-9f10-13a6f9b5fe98] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1221.377939] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945075, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.471501] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.471828] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.472117] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.472380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.472628] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.474856] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-128a549b-bc9e-4785-bddb-82f8ce91ce22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.477156] env[68437]: INFO nova.compute.manager [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Terminating instance [ 1221.485811] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc3fe02-95f8-42bf-a459-d155ae1aa6a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.523103] env[68437]: DEBUG nova.compute.manager [req-d078d846-cf57-4dac-9979-3afe8a9af9dc req-ee44c725-ffc1-41a7-a409-799dd0c99d46 service nova] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Detach interface failed, port_id=4f16f068-3b53-4a8d-a82b-21114eb371ea, reason: Instance 00b76648-d27b-4002-80cb-366e64c32ecc could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1221.580601] env[68437]: DEBUG oslo_vmware.api [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14675} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.580891] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.581141] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.581314] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.581508] env[68437]: INFO nova.compute.manager [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1221.581787] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1221.581998] env[68437]: DEBUG nova.compute.manager [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1221.582122] env[68437]: DEBUG nova.network.neutron [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1221.776859] env[68437]: DEBUG nova.network.neutron [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updated VIF entry in instance network info cache for port bce31050-7ee6-4cbb-8b64-b2bf9be3c649. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1221.777302] env[68437]: DEBUG nova.network.neutron [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updating instance_info_cache with network_info: [{"id": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "address": "fa:16:3e:61:57:10", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbce31050-7e", "ovs_interfaceid": "bce31050-7ee6-4cbb-8b64-b2bf9be3c649", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.793886] env[68437]: INFO nova.compute.manager [-] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Took 1.26 seconds to deallocate network for instance. [ 1221.821651] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 76d97a56-21a2-4363-a987-ef872f056510] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1221.878233] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945075, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.981125] env[68437]: DEBUG nova.compute.manager [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.981412] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.982299] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e13e70-04b1-4998-b839-b016a6f35f7f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.990102] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.990325] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8fcf0c71-1d49-474e-b46b-03e8b6334c28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.996615] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1221.996615] env[68437]: value = "task-2945079" [ 1221.996615] env[68437]: _type = "Task" [ 1221.996615] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.004470] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.281269] env[68437]: DEBUG oslo_concurrency.lockutils [req-e803d636-ebe1-4967-9fe7-c93811c182f1 req-d0de6baa-2dfd-497c-8175-ddb5fa7a2afe service nova] Releasing lock "refresh_cache-ede62837-4ff5-44be-a015-9ea06b9126a5" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.300928] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.301272] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.301473] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.323974] env[68437]: INFO nova.scheduler.client.report [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Deleted allocations for instance 00b76648-d27b-4002-80cb-366e64c32ecc [ 1222.327904] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: bf8deffa-e6a7-4d7f-9cf1-dbc1d6cc9bee] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1222.378064] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945075, 'name': ReconfigVM_Task, 'duration_secs': 1.273843} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.378387] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591140', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'name': 'volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'aff861ed-e792-480a-811e-c157c0606d08', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'serial': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1222.378974] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e63511f-bdb9-4a10-b51d-3b91fcfe1818 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.385108] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1222.385108] env[68437]: value = "task-2945080" [ 1222.385108] env[68437]: _type = "Task" [ 1222.385108] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.392613] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945080, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.413823] env[68437]: DEBUG nova.network.neutron [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.506604] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945079, 'name': PowerOffVM_Task, 'duration_secs': 0.224452} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.506873] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.507057] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.507313] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5e2270c-f4c6-4874-8820-fbd68ee775fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.571720] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.572249] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.572677] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleting the datastore file [datastore1] 8ccd7176-55c0-4118-a07e-3c4bdbba9795 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.573159] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59a7f7f1-5fc4-45d9-804e-de05408e6932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.582503] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1222.582503] env[68437]: value = "task-2945082" [ 1222.582503] env[68437]: _type = "Task" [ 1222.582503] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.592548] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945082, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.833631] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 27429c12-ce0a-4e21-ac1b-6862a8063a9f] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1222.835649] env[68437]: DEBUG oslo_concurrency.lockutils [None req-32dd8af2-e244-4982-bd35-7896dfe04cc7 tempest-DeleteServersTestJSON-1806147129 tempest-DeleteServersTestJSON-1806147129-project-member] Lock "00b76648-d27b-4002-80cb-366e64c32ecc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.486s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.895289] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945080, 'name': Rename_Task, 'duration_secs': 0.189122} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.895591] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.895848] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53128cf8-5191-4817-8518-c97787bf3fc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.902589] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1222.902589] env[68437]: value = "task-2945083" [ 1222.902589] env[68437]: _type = "Task" [ 1222.902589] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.911239] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945083, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.915782] env[68437]: INFO nova.compute.manager [-] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Took 1.33 seconds to deallocate network for instance. 
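Every vCenter operation traced above (DeleteDatastoreFile_Task, ReconfigVM_Task, PowerOffVM_Task, PowerOnVM_Task, Rename_Task) follows the same shape: the *_Task method is invoked, a Task handle such as task-2945077 comes back, and oslo.vmware's wait_for_task/_poll_task loop logs "progress is N%" until the task reports completion. The Python fragment below is only a minimal sketch of that polling loop, not oslo.vmware's actual implementation; TaskInfo, the get_task_info callable and the canned _states feeder are invented here so the sketch runs on its own.

    from dataclasses import dataclass
    import time

    @dataclass
    class TaskInfo:
        state: str            # 'running' | 'success' | 'error' (simplified)
        progress: int = 0
        result: object = None
        error: str = ""

    def wait_for_task(get_task_info, task_id, interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the DEBUG lines above."""
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            # still queued/running: report progress and poll again
            print(f"Task: {{'id': {task_id!r}}} progress is {info.progress}%.")
            time.sleep(interval)

    # Tiny stand-in for the vCenter side, so the sketch is self-contained.
    _states = iter([TaskInfo("running", 0), TaskInfo("running", 99),
                    TaskInfo("success", 100, result="ok")])
    wait_for_task(lambda _id: next(_states), "task-2945077", interval=0.01)

In the real driver the task handle is returned by the SOAP call logged as "Invoking FileManager.DeleteDatastoreFile_Task ...", and the completed-task line ("duration_secs": ...) corresponds to the success branch above.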
[ 1223.026931] env[68437]: DEBUG nova.compute.manager [req-3b2ed591-7971-454e-9362-ca89c303d1e9 req-5ecf240d-4657-49e0-9d13-0a5edb93a37b service nova] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Received event network-vif-deleted-bf3637c1-c320-4b68-874d-64e97b9d845e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1223.093413] env[68437]: DEBUG oslo_vmware.api [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153697} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.093628] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.093803] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.094320] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.094320] env[68437]: INFO nova.compute.manager [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1223.094466] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.094616] env[68437]: DEBUG nova.compute.manager [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.094709] env[68437]: DEBUG nova.network.neutron [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1223.337731] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: b81a414d-51bf-4f08-b0d3-a19a7aa4efe5] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1223.418161] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945083, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.424136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.424136] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.424136] env[68437]: DEBUG nova.objects.instance [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid 55076395-fd1d-48a7-ab85-fe0eb03afa19 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.844397] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 987ed4b2-5c7a-4c7b-a7a5-66b4e515b439] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1223.915839] env[68437]: DEBUG oslo_vmware.api [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945083, 'name': PowerOnVM_Task, 'duration_secs': 0.787378} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.916198] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1224.021789] env[68437]: DEBUG nova.compute.manager [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1224.022722] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbf3493-729d-40e8-b861-56a0c4e775fc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.031677] env[68437]: DEBUG nova.network.neutron [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.086215] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b4501d-5029-4220-a9ae-dfd3b7d2dc0c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.095937] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff421818-be40-4ca9-99c0-9958c5a41be6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.128507] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11429430-aa50-46b3-ad07-d6ae507c2077 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.135646] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76108ed6-58b3-4bc4-8ac3-03b96993ad97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.148930] env[68437]: DEBUG nova.compute.provider_tree [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.347754] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 832697dc-53ec-406d-b698-d10766bd8f9d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1224.536314] env[68437]: INFO nova.compute.manager [-] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] 
Took 1.44 seconds to deallocate network for instance. [ 1224.548556] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e49b1f29-aaf7-4789-9d1a-4a48403a3b1b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 33.136s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.676347] env[68437]: ERROR nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [req-858ba8f4-5439-42ba-a159-3242723b6ba4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-858ba8f4-5439-42ba-a159-3242723b6ba4"}]} [ 1224.690680] env[68437]: DEBUG nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1224.704403] env[68437]: DEBUG nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1224.704587] env[68437]: DEBUG nova.compute.provider_tree [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1224.715450] env[68437]: DEBUG nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing aggregate 
associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1224.735487] env[68437]: DEBUG nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1224.850645] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ee0450b5-66ce-41ed-9f4f-7ffa7b46f769] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1224.868816] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646fb593-1f2a-4df7-bd09-b600f6ce0aca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.876773] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d441cd-6156-419d-80c8-1f694a5e52a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.908799] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f704855-54d4-40f2-af1b-99b55dfb7642 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.916182] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d197f0-6b80-496a-bcac-29f3ba0eb4e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.929690] env[68437]: DEBUG nova.compute.provider_tree [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.047105] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.057760] env[68437]: DEBUG nova.compute.manager [req-e9681a6e-20cc-40fe-b3f7-3f90f713954c req-9a5badc6-ea50-4694-ba68-7934c4bf374e service nova] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Received event network-vif-deleted-4ec75d03-3ee1-480d-ab6a-acc211fd6bae {{(pid=68437) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 1225.354486] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 33cc7565-9cd0-47a7-afe2-ac3849ba7ac3] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1225.460734] env[68437]: DEBUG nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1225.461056] env[68437]: DEBUG nova.compute.provider_tree [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 142 to 143 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1225.461213] env[68437]: DEBUG nova.compute.provider_tree [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1225.857658] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 75a9fb57-5796-4853-b429-6e8ea7aba1de] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1225.966529] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.544s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.968714] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.922s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.968919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.986537] env[68437]: INFO nova.scheduler.client.report [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted allocations for instance 8ccd7176-55c0-4118-a07e-3c4bdbba9795 [ 1225.992996] env[68437]: INFO nova.scheduler.client.report [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance 55076395-fd1d-48a7-ab85-fe0eb03afa19 [ 1226.361315] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: bd3721bf-74fb-41b3-8090-1b370c0ea9fb] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1226.493941] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a021df9-2ff0-40f1-841f-2baa22c942cf tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "8ccd7176-55c0-4118-a07e-3c4bdbba9795" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.022s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.496999] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3db159f3-67b3-49fd-84a2-cf347f9b3a5e tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "55076395-fd1d-48a7-ab85-fe0eb03afa19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.546s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.765030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "7a05d783-afac-43a1-a715-c83b42c990c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.765030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "7a05d783-afac-43a1-a715-c83b42c990c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.765030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "7a05d783-afac-43a1-a715-c83b42c990c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.765369] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock 
"7a05d783-afac-43a1-a715-c83b42c990c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.765447] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "7a05d783-afac-43a1-a715-c83b42c990c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.768126] env[68437]: INFO nova.compute.manager [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Terminating instance [ 1226.865103] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ccad008b-0a3a-4234-9c4c-c3a5230a938e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1227.272234] env[68437]: DEBUG nova.compute.manager [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1227.272480] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1227.273430] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d4a7f0-dce9-4b4a-9d68-10c4a0d5bb97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.284079] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1227.284405] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-188dfc68-a4ca-4efc-9581-420d14c9f83e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.290946] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1227.290946] env[68437]: value = "task-2945087" [ 1227.290946] env[68437]: _type = "Task" [ 1227.290946] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.300474] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945087, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.369357] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: f8f8302e-53ab-47e8-adc6-ccf6fd5a8a28] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1227.791048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.791301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.802337] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945087, 'name': PowerOffVM_Task, 'duration_secs': 0.285782} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.803193] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1227.803423] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1227.803706] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ead8da57-9f29-4554-a97d-3eb0c653a67d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.873737] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: e51356e4-7647-4678-bb4f-f069b5c7fef6] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1227.875663] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1227.876381] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Deleting contents of 
the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1227.876381] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore2] 7a05d783-afac-43a1-a715-c83b42c990c2 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1227.876500] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a8ce6ab-40a1-4365-8172-119f26eeccb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.883692] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1227.883692] env[68437]: value = "task-2945089" [ 1227.883692] env[68437]: _type = "Task" [ 1227.883692] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.892682] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.296838] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1228.377763] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 05e07d7c-0161-463c-89f7-1bf28f680bde] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1228.393853] env[68437]: DEBUG oslo_vmware.api [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141253} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.394257] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1228.394518] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1228.394720] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1228.394892] env[68437]: INFO nova.compute.manager [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1228.395162] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1228.395491] env[68437]: DEBUG nova.compute.manager [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1228.395491] env[68437]: DEBUG nova.network.neutron [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1228.658031] env[68437]: DEBUG nova.compute.manager [req-838c366e-22d5-451f-a8a5-08c7b862573f req-171c9fb7-d89e-4edb-8761-3a791ad9f556 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Received event network-vif-deleted-dabb93d9-4501-4176-a8b1-cea28a047927 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1228.658031] env[68437]: INFO nova.compute.manager [req-838c366e-22d5-451f-a8a5-08c7b862573f req-171c9fb7-d89e-4edb-8761-3a791ad9f556 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Neutron deleted interface dabb93d9-4501-4176-a8b1-cea28a047927; detaching it from the instance and deleting it from the info cache [ 1228.658370] env[68437]: DEBUG nova.network.neutron [req-838c366e-22d5-451f-a8a5-08c7b862573f req-171c9fb7-d89e-4edb-8761-3a791ad9f556 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.821048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.821048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.823266] env[68437]: INFO nova.compute.claims [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1228.881543] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ae32443d-3b55-4bd7-8f07-e66d206ec1d1] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1229.132873] env[68437]: DEBUG nova.network.neutron [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.161439] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffa88083-7eb9-4116-9a33-5ea19b819ef3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.171442] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438bbde6-a765-4fa7-a854-df189b5bcb6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.201743] env[68437]: DEBUG nova.compute.manager [req-838c366e-22d5-451f-a8a5-08c7b862573f req-171c9fb7-d89e-4edb-8761-3a791ad9f556 service nova] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Detach interface failed, port_id=dabb93d9-4501-4176-a8b1-cea28a047927, reason: Instance 7a05d783-afac-43a1-a715-c83b42c990c2 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1229.384530] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 697d5011-fb4e-4542-851b-39953bbb293d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1229.635779] env[68437]: INFO nova.compute.manager [-] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Took 1.24 seconds to deallocate network for instance. 
[ 1229.887823] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 7422ff70-901c-4343-9b9f-f12c52348d2c] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1229.947786] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069c6e53-76fe-432f-9cc1-585b3ceb139c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.955802] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490af136-94ae-4485-8211-c91620ef9e1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.987448] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea8a6dd-53f4-4416-8912-5b114382f9f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.994870] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fc4203-991a-409a-9105-532d1524b49e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.008554] env[68437]: DEBUG nova.compute.provider_tree [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.141618] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.392155] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ada623a8-b0ce-4709-b2af-ad80b464af4e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1230.512485] env[68437]: DEBUG nova.scheduler.client.report [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1230.891025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.891025] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.895481] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 9c7ea13c-0a50-4ba5-b84d-bf50dc2318b5] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1231.017820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.018376] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1231.021219] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.880s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.021489] env[68437]: DEBUG nova.objects.instance [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid 7a05d783-afac-43a1-a715-c83b42c990c2 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.394666] env[68437]: DEBUG nova.compute.utils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1231.397813] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 3a2dad52-63d3-46ec-ac43-3922bca3919e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1231.525021] env[68437]: DEBUG nova.compute.utils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1231.526464] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] 
Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1231.526672] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1231.584014] env[68437]: DEBUG nova.policy [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e4b1b3012874778bc147c3e7b00133c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6f6382f6c6843529a37d7c62837523a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1231.661806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b0db8c-a2ca-4f11-be27-f2fb7e72c080 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.671494] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2201707-f075-4201-a6d9-72cd7e108154 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.704263] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5708d1-66b4-4bf6-8577-b2f4f971e423 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.712855] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e88cfc-66e2-4c2d-a677-493534d20e3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.727424] env[68437]: DEBUG nova.compute.provider_tree [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1231.882636] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Successfully created port: f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.898779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 
tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.008s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.900363] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 3f82b137-81d5-4754-b222-3cefce0b2a10] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1232.032111] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1232.247857] env[68437]: ERROR nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [req-7efff781-6e8e-4323-b6a3-55b4fd55bc7a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7efff781-6e8e-4323-b6a3-55b4fd55bc7a"}]} [ 1232.264869] env[68437]: DEBUG nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1232.279235] env[68437]: DEBUG nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1232.279469] env[68437]: DEBUG nova.compute.provider_tree [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1232.290890] env[68437]: DEBUG nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1232.308711] env[68437]: DEBUG nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1232.404092] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2f13fd89-eb0b-4ff7-aebb-d75cd621bb2c] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1232.410440] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88285429-3e5a-42c2-bb5d-5572a9454b44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.418965] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767658a3-3049-43e3-b4a6-a5dc60cff431 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.452590] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1806161-b609-4915-a006-56b98e479cc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.462888] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dde2c45-b9f2-44ac-b977-049be031fdb9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.481708] env[68437]: DEBUG nova.compute.provider_tree [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1232.907701] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: c4ff8947-7129-4bd4-ab3c-3ecccae0e1dd] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11766}} [ 1232.969974] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.970339] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.970618] env[68437]: INFO nova.compute.manager [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Attaching volume 3e327e5e-d2d8-43e7-8b82-99613c5c5698 to /dev/sdb [ 1233.000518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10f0c36-3b98-4f65-aff5-22e74ebad676 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.008670] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202abd9e-935e-4d1f-844a-d05b1cc1e43a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.014328] env[68437]: DEBUG nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1233.014573] env[68437]: DEBUG nova.compute.provider_tree [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 144 to 145 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1233.014753] env[68437]: DEBUG nova.compute.provider_tree [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1233.022894] env[68437]: DEBUG nova.virt.block_device [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updating existing volume attachment record: 01bfe1a5-83b0-4f26-b325-d27d822b5def {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1233.041293] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1233.067400] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1233.067648] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1233.067806] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1233.067988] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1233.068178] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1233.068329] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1233.068541] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1233.068745] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1233.068859] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1233.069032] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1233.069212] env[68437]: DEBUG nova.virt.hardware [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1233.070064] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc4a9d5-b9d9-4ebb-8744-c175fc2bee11 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.077708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba69237-e001-43dd-b366-d62cae29b5b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.250389] env[68437]: DEBUG nova.compute.manager [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Received event network-vif-plugged-f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1233.250671] env[68437]: DEBUG oslo_concurrency.lockutils [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.251013] env[68437]: DEBUG oslo_concurrency.lockutils [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 1233.251577] env[68437]: DEBUG oslo_concurrency.lockutils [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.251898] env[68437]: DEBUG nova.compute.manager [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] No waiting events found dispatching network-vif-plugged-f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1233.252251] env[68437]: WARNING nova.compute.manager [req-33ba4e34-10e3-49b7-8d5d-fd4799108e38 req-db16f0c4-cd77-4d86-87d9-1db1f8989254 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Received unexpected event network-vif-plugged-f6919baa-a381-4bb9-bb35-d535d859a1e4 for instance with vm_state building and task_state spawning. [ 1233.343401] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Successfully updated port: f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.411494] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 4f46132c-155d-4def-b017-7fd84e37eed5] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1233.519734] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.498s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.539036] env[68437]: INFO nova.scheduler.client.report [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance 7a05d783-afac-43a1-a715-c83b42c990c2 [ 1233.846415] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.846579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.846728] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2068}} [ 1233.914725] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: aaa2a858-9cc0-4b5a-8729-80e5440b530d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1234.047105] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a5dfbfab-cd02-4f85-9342-06cdecfeee6d tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "7a05d783-afac-43a1-a715-c83b42c990c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.282s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.381512] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1234.418993] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6d09b9e8-f701-4548-8ec3-c1d9e69223ee] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1234.505614] env[68437]: DEBUG nova.network.neutron [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.819012] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.819012] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.922827] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 221fcaf9-e17a-4594-90be-9dd49e7df424] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1235.007997] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.008343] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Instance network_info: |[{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1235.008757] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:30:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6919baa-a381-4bb9-bb35-d535d859a1e4', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.016423] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1235.016887] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1235.017131] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-673906da-46cd-428d-ad95-f486b1c39a07 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.038069] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1235.038069] env[68437]: value = "task-2945095" [ 1235.038069] env[68437]: _type = "Task" [ 1235.038069] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.045909] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945095, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.280266] env[68437]: DEBUG nova.compute.manager [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Received event network-changed-f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1235.280266] env[68437]: DEBUG nova.compute.manager [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Refreshing instance network info cache due to event network-changed-f6919baa-a381-4bb9-bb35-d535d859a1e4. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1235.280266] env[68437]: DEBUG oslo_concurrency.lockutils [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.280266] env[68437]: DEBUG oslo_concurrency.lockutils [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.280266] env[68437]: DEBUG nova.network.neutron [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Refreshing network info cache for port f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1235.322215] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1235.426675] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 4254002c-d292-4f10-a3d0-387853dbbcb3] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1235.548570] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945095, 'name': CreateVM_Task, 'duration_secs': 0.330732} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.548747] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1235.549456] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.549673] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.550038] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1235.550307] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38a6f49d-dd44-4153-a038-f9dbfeba6d20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.555647] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1235.555647] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522796f2-baac-1fe1-cfcb-5cee9c5d1572" [ 1235.555647] env[68437]: _type = "Task" [ 1235.555647] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.563615] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522796f2-baac-1fe1-cfcb-5cee9c5d1572, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.845060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.846037] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.846950] env[68437]: INFO nova.compute.claims [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1235.930360] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 098010b8-b7f7-4bd1-a42c-7fc4dcaa666e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1235.994078] env[68437]: DEBUG nova.network.neutron [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updated VIF entry in instance network info cache for port f6919baa-a381-4bb9-bb35-d535d859a1e4. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1235.994442] env[68437]: DEBUG nova.network.neutron [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.067062] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522796f2-baac-1fe1-cfcb-5cee9c5d1572, 'name': SearchDatastore_Task, 'duration_secs': 0.011121} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.067313] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.067579] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1236.067829] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.067990] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.068211] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1236.068456] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abce8d94-60c0-4924-b47f-98b7aae7a276 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.077553] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1236.077729] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1236.078448] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c6a7685-d675-4e4b-9951-0ccab1a9b4d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.083581] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1236.083581] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520cc8f6-3e8b-090e-bb9c-3a8308fcdd26" [ 1236.083581] env[68437]: _type = "Task" [ 1236.083581] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.090999] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cc8f6-3e8b-090e-bb9c-3a8308fcdd26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.435631] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 832c99fc-0f09-4ccb-96f9-894ce62eb17e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1236.497208] env[68437]: DEBUG oslo_concurrency.lockutils [req-8a3067cd-781f-4dfd-917e-9d76f866ec93 req-8e7693fa-5902-4e05-9d5b-f1aeb662ab1e service nova] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.594652] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520cc8f6-3e8b-090e-bb9c-3a8308fcdd26, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.595481] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98e84b86-0b0a-4e0b-bbfe-3d1b6df4cda0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.601427] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1236.601427] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd41c3-5f60-4e04-fa67-6ecc63dcd9b0" [ 1236.601427] env[68437]: _type = "Task" [ 1236.601427] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.609249] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd41c3-5f60-4e04-fa67-6ecc63dcd9b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.938393] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: c9d26fd4-f780-4986-8a5f-dea041a70f5d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1236.972728] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5007b731-8a3c-473e-9e6a-e0a41ac15be8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.981570] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de75d5e7-ec81-4672-84e9-3a1fd0b71b24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.014225] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46564b9-7e43-430d-b190-0c5fff366c34 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.022437] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004dea56-f729-4119-acef-804beb4cc9af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.036349] env[68437]: DEBUG nova.compute.provider_tree [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.111515] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52fd41c3-5f60-4e04-fa67-6ecc63dcd9b0, 'name': SearchDatastore_Task, 'duration_secs': 0.01026} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.111727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.111979] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1237.112249] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0453e3b-a9bb-474b-959c-0eab6e6de7eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.119851] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1237.119851] env[68437]: value = "task-2945098" [ 1237.119851] env[68437]: _type = "Task" [ 1237.119851] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.127469] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.442505] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: d84c599e-29b2-45ec-a3f7-54ef85af9a3d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1237.539646] env[68437]: DEBUG nova.scheduler.client.report [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.568349] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1237.568613] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591147', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'name': 'volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8554a78c-c2d7-459d-a295-121da777dfd4', 'attached_at': '', 'detached_at': '', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'serial': '3e327e5e-d2d8-43e7-8b82-99613c5c5698'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1237.569534] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f7535e-5596-4853-9c81-042c605b83b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.587258] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee4ea6e-b816-4bea-84ba-f9db91644436 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.611789] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698/volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.612051] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74f5979e-d000-4e43-af90-ff7040124f97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.632905] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4359} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.634125] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1237.634339] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1237.634641] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1237.634641] env[68437]: value = "task-2945099" [ 1237.634641] env[68437]: _type = "Task" [ 1237.634641] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.634826] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f5b1220-511e-4170-a65e-d10dd72e72ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.645603] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.646765] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1237.646765] env[68437]: value = "task-2945100" [ 1237.646765] env[68437]: _type = "Task" [ 1237.646765] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.654514] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945100, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.946797] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6d877579-3095-4ee9-bb3e-4d5a9122f1ed] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1238.044296] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.044842] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1238.147043] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945099, 'name': ReconfigVM_Task, 'duration_secs': 0.367286} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.147422] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698/volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.154716] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-597b7c54-3b22-4f23-b69f-45c9b0422d10 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.170086] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945100, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062134} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.171220] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1238.171534] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1238.171534] env[68437]: value = "task-2945101" [ 1238.171534] env[68437]: _type = "Task" [ 1238.171534] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.172183] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeeb2ab-dee1-4d25-8916-50ff1435bd0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.182486] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945101, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.200781] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.200937] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d40714-f39c-462d-81be-39f4895edca4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.220452] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1238.220452] env[68437]: value = "task-2945102" [ 1238.220452] env[68437]: _type = "Task" [ 1238.220452] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.228247] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945102, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.450717] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 53c4ca02-2bc3-4a55-9aea-0e0dd669a37c] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1238.549791] env[68437]: DEBUG nova.compute.utils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1238.551630] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1238.551839] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1238.589906] env[68437]: DEBUG nova.policy [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1238.685868] env[68437]: DEBUG oslo_vmware.api [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945101, 'name': ReconfigVM_Task, 'duration_secs': 0.148653} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.686188] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591147', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'name': 'volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8554a78c-c2d7-459d-a295-121da777dfd4', 'attached_at': '', 'detached_at': '', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'serial': '3e327e5e-d2d8-43e7-8b82-99613c5c5698'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1238.730581] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945102, 'name': ReconfigVM_Task, 'duration_secs': 0.291082} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.730844] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.731557] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cc3366b-627f-4560-a87b-454570c59145 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.738348] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1238.738348] env[68437]: value = "task-2945103" [ 1238.738348] env[68437]: _type = "Task" [ 1238.738348] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.745856] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945103, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.846160] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Successfully created port: 087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.954136] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ba0d8067-a617-4910-b2f6-33a7be461f8e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1239.057986] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1239.248370] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945103, 'name': Rename_Task, 'duration_secs': 0.146404} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.248621] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1239.248865] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fcbb579-0533-46ae-9022-ab891c210750 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.256858] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1239.256858] env[68437]: value = "task-2945104" [ 1239.256858] env[68437]: _type = "Task" [ 1239.256858] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.264905] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.458796] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: b7706bf2-936f-439c-8e9f-b2241d0c211c] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1239.720653] env[68437]: DEBUG nova.objects.instance [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid 8554a78c-c2d7-459d-a295-121da777dfd4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.767191] env[68437]: DEBUG oslo_vmware.api [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945104, 'name': PowerOnVM_Task, 'duration_secs': 0.44364} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.767488] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1239.767680] env[68437]: INFO nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Took 6.73 seconds to spawn the instance on the hypervisor. 
[ 1239.767860] env[68437]: DEBUG nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1239.768667] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c4bb7d-51fc-4e5a-9b0e-49fd36109497 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.962371] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: a01364f9-e30d-4140-ae41-1e7c4aaa2251] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1240.065428] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1240.090284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.090391] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.093694] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1240.093694] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1240.093694] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1240.093906] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1240.093984] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1240.094137] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1240.094358] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1240.094517] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1240.094675] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1240.094828] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1240.094997] env[68437]: DEBUG nova.virt.hardware [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1240.096287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3501dd8d-753c-4883-bef0-3dcd3d92a37d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.105073] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d4e828-0257-4ded-b029-bcb0c4ab4920 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.203877] env[68437]: DEBUG nova.compute.manager 
[req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Received event network-vif-plugged-087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1240.204047] env[68437]: DEBUG oslo_concurrency.lockutils [req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] Acquiring lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.204301] env[68437]: DEBUG oslo_concurrency.lockutils [req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.204479] env[68437]: DEBUG oslo_concurrency.lockutils [req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.204645] env[68437]: DEBUG nova.compute.manager [req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] No waiting events found dispatching network-vif-plugged-087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1240.204806] env[68437]: WARNING nova.compute.manager [req-113df91e-e96b-4426-a658-ea6db7849620 req-9793c68f-a954-4120-8772-9773489eb935 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Received unexpected event network-vif-plugged-087445ab-aaa6-4608-8412-adbe25287f5e for instance with vm_state building and task_state spawning. [ 1240.225346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e30513-963f-4655-a954-93192da95770 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.255s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.293185] env[68437]: INFO nova.compute.manager [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Took 11.49 seconds to build instance. 
[ 1240.299586] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Successfully updated port: 087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1240.435123] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.435403] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.465724] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 7ae346fa-fbb2-4fd7-b620-f0dda8243ca8] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1240.600045] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1240.794445] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ba487df8-2826-41b8-9e9b-e89d6c1d2fc2 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.003s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.802184] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.802332] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.802481] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1240.895559] env[68437]: DEBUG nova.compute.manager [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Received event network-changed-f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1240.895742] env[68437]: DEBUG nova.compute.manager [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Refreshing instance network info cache due to event network-changed-f6919baa-a381-4bb9-bb35-d535d859a1e4. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1240.895972] env[68437]: DEBUG oslo_concurrency.lockutils [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.896126] env[68437]: DEBUG oslo_concurrency.lockutils [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1240.896291] env[68437]: DEBUG nova.network.neutron [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Refreshing network info cache for port f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1240.938121] env[68437]: INFO nova.compute.manager [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Detaching volume 3e327e5e-d2d8-43e7-8b82-99613c5c5698 [ 1240.968785] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 29e9555b-f928-43e7-a3a3-869ed07d7326] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1240.979912] env[68437]: INFO nova.virt.block_device [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Attempting to driver detach volume 3e327e5e-d2d8-43e7-8b82-99613c5c5698 from mountpoint /dev/sdb [ 1240.980163] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1240.980360] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591147', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'name': 'volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8554a78c-c2d7-459d-a295-121da777dfd4', 'attached_at': '', 'detached_at': '', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'serial': '3e327e5e-d2d8-43e7-8b82-99613c5c5698'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1240.981252] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c167138a-b5c8-4e42-bd1c-dc49ab8d98f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.006758] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f61bafe-1d2f-4483-98a5-8b004fd27f9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.014823] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7167dc-c861-4e2a-aac0-a856444aa512 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.036085] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c2d7b2-2773-472d-94be-897a6fe68fe6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.050796] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] The volume has not been displaced from its original location: [datastore1] volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698/volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1241.056054] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.056596] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1cdfd68-13a5-488d-ae45-a052a1666639 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.074681] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1241.074681] env[68437]: value = "task-2945105" [ 1241.074681] env[68437]: _type = "Task" [ 1241.074681] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.082476] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945105, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.122973] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.123333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.124838] env[68437]: INFO nova.compute.claims [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1241.333068] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1241.474831] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 013a92cc-0fc2-4e85-aee6-efb62bae4dcb] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1241.498103] env[68437]: DEBUG nova.network.neutron [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Updating instance_info_cache with network_info: [{"id": "087445ab-aaa6-4608-8412-adbe25287f5e", "address": "fa:16:3e:ae:ff:db", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087445ab-aa", "ovs_interfaceid": "087445ab-aaa6-4608-8412-adbe25287f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.585979] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945105, 'name': ReconfigVM_Task, 'duration_secs': 0.225223} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.588617] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.593365] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-653fc7a0-7779-4583-8da4-037265788dfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.611356] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1241.611356] env[68437]: value = "task-2945106" [ 1241.611356] env[68437]: _type = "Task" [ 1241.611356] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.623691] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945106, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.670680] env[68437]: DEBUG nova.network.neutron [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updated VIF entry in instance network info cache for port f6919baa-a381-4bb9-bb35-d535d859a1e4. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1241.671126] env[68437]: DEBUG nova.network.neutron [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.978577] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: d5db3112-88c7-43af-a434-b91ca69f8559] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1242.001785] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.002158] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Instance network_info: |[{"id": "087445ab-aaa6-4608-8412-adbe25287f5e", "address": "fa:16:3e:ae:ff:db", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087445ab-aa", "ovs_interfaceid": "087445ab-aaa6-4608-8412-adbe25287f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1242.002665] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:ff:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '087445ab-aaa6-4608-8412-adbe25287f5e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.011157] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1242.012292] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1242.012550] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c293b4c-df75-4ea0-993d-08e3d520ee68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.040545] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.040545] env[68437]: value = "task-2945107" [ 1242.040545] env[68437]: _type = "Task" [ 1242.040545] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.055629] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945107, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.121337] env[68437]: DEBUG oslo_vmware.api [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945106, 'name': ReconfigVM_Task, 'duration_secs': 0.432574} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.121684] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591147', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'name': 'volume-3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8554a78c-c2d7-459d-a295-121da777dfd4', 'attached_at': '', 'detached_at': '', 'volume_id': '3e327e5e-d2d8-43e7-8b82-99613c5c5698', 'serial': '3e327e5e-d2d8-43e7-8b82-99613c5c5698'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1242.175933] env[68437]: DEBUG oslo_concurrency.lockutils [req-6c960c2b-affc-40aa-bbc3-6b9b095c7d0e req-f611665c-fe36-4811-a556-b0bf49fd6714 service nova] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.228339] env[68437]: DEBUG nova.compute.manager [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Received event network-changed-087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1242.228527] env[68437]: DEBUG nova.compute.manager [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Refreshing instance network info cache due to event network-changed-087445ab-aaa6-4608-8412-adbe25287f5e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1242.228778] env[68437]: DEBUG oslo_concurrency.lockutils [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] Acquiring lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.228870] env[68437]: DEBUG oslo_concurrency.lockutils [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] Acquired lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.229037] env[68437]: DEBUG nova.network.neutron [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Refreshing network info cache for port 087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1242.265456] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bd6428-f4b7-48a5-a5c2-e38913724992 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.277277] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1272af-3417-4e08-9d4b-d04d2f0dbf8a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.310885] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4753408-ee93-4e3a-a1af-978cf46c737d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.318794] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13048bc1-be8a-45c9-aa3e-0889d548892e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.334387] env[68437]: DEBUG nova.compute.provider_tree [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1242.482934] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: cf691a81-60e3-40ed-ba80-8f481ff2554b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1242.550978] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945107, 'name': CreateVM_Task, 'duration_secs': 0.346754} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.551236] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1242.551915] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.552097] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.552429] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1242.552683] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe033fa4-64f0-4179-8ae5-4b235ab70303 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.557650] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1242.557650] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d39fe1-541d-d94e-6a97-20a3b746f038" [ 1242.557650] env[68437]: _type = "Task" [ 1242.557650] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.565573] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d39fe1-541d-d94e-6a97-20a3b746f038, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.666764] env[68437]: DEBUG nova.objects.instance [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid 8554a78c-c2d7-459d-a295-121da777dfd4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.854157] env[68437]: ERROR nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [req-3185cf23-4942-4a9d-92a2-2788b16ea94a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3185cf23-4942-4a9d-92a2-2788b16ea94a"}]} [ 1242.873041] env[68437]: DEBUG nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1242.886349] env[68437]: DEBUG nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1242.886578] env[68437]: DEBUG nova.compute.provider_tree [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1242.898793] env[68437]: DEBUG nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] 
Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1242.912170] env[68437]: DEBUG nova.network.neutron [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Updated VIF entry in instance network info cache for port 087445ab-aaa6-4608-8412-adbe25287f5e. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1242.912630] env[68437]: DEBUG nova.network.neutron [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Updating instance_info_cache with network_info: [{"id": "087445ab-aaa6-4608-8412-adbe25287f5e", "address": "fa:16:3e:ae:ff:db", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087445ab-aa", "ovs_interfaceid": "087445ab-aaa6-4608-8412-adbe25287f5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.916638] env[68437]: DEBUG nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1242.986652] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: cf394b0b-cb14-4ae1-81bb-622c951bfdab] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1243.042401] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7507e1c2-02cf-4733-9d08-4bba7ab18923 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.051259] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44810ba1-8b8d-4cb4-b625-4c2dd8f83239 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.085199] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5d5ad8-de36-4218-952d-82b422ea0230 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.092757] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d39fe1-541d-d94e-6a97-20a3b746f038, 'name': SearchDatastore_Task, 'duration_secs': 0.015842} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.094779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.095026] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.095264] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.095412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1243.095588] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1243.095855] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-488f7a49-075f-4259-b3e9-d8f5ca419bac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.098626] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d038968a-d398-47d4-9964-d339f7744923 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.112705] env[68437]: DEBUG nova.compute.provider_tree [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.116481] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.116481] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1243.116481] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4839707d-0130-4ec9-a06a-eb86c35254b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.120801] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1243.120801] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52252bf7-c1ab-542e-c142-109bfd97625a" [ 1243.120801] env[68437]: _type = "Task" [ 1243.120801] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.128999] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52252bf7-c1ab-542e-c142-109bfd97625a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.415252] env[68437]: DEBUG oslo_concurrency.lockutils [req-0df14260-e850-45d7-9a3c-28b7b3665118 req-6269ef6a-8132-4a25-b7dd-8b717f1b1901 service nova] Releasing lock "refresh_cache-eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.491670] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2f368262-0825-4ccc-9b1e-523b705bcfce] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1243.632062] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52252bf7-c1ab-542e-c142-109bfd97625a, 'name': SearchDatastore_Task, 'duration_secs': 0.03819} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.632850] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ae197b0-8412-4c1d-a01e-086999f9ccb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.638169] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1243.638169] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5202c27c-2dfc-1dd4-7db5-92f6dc32c11a" [ 1243.638169] env[68437]: _type = "Task" [ 1243.638169] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.642604] env[68437]: DEBUG nova.scheduler.client.report [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 147 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1243.642889] env[68437]: DEBUG nova.compute.provider_tree [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 147 to 148 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1243.643172] env[68437]: DEBUG nova.compute.provider_tree [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1243.649135] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5202c27c-2dfc-1dd4-7db5-92f6dc32c11a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.674301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-69083d7b-36ac-45b6-8d65-0e328af2adb9 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.239s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.994656] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 19dde8dd-eae6-41a0-b147-c505db1cda15] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1244.149596] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5202c27c-2dfc-1dd4-7db5-92f6dc32c11a, 'name': SearchDatastore_Task, 'duration_secs': 0.04844} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.150301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.027s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.150644] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1244.153180] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.153426] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] eb5c9d63-ac51-4cef-93c9-c15119ea2ea7/eb5c9d63-ac51-4cef-93c9-c15119ea2ea7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1244.153865] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4a024b5-4bd4-4b9f-a63a-98f83d6d88e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.162325] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1244.162325] env[68437]: value = "task-2945108" [ 1244.162325] env[68437]: _type = "Task" [ 1244.162325] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.172162] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945108, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.499249] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 1186da93-57aa-40f4-8aae-702d039844d4] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1244.655384] env[68437]: DEBUG nova.compute.utils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1244.656790] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1244.656960] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1244.673405] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.411332} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.673637] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] eb5c9d63-ac51-4cef-93c9-c15119ea2ea7/eb5c9d63-ac51-4cef-93c9-c15119ea2ea7.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1244.673852] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1244.674207] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6aedda2-fadc-4e54-8e30-b4faf6f944a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.681538] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.681753] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.681952] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.682147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 
tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.682313] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.683821] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1244.683821] env[68437]: value = "task-2945109" [ 1244.683821] env[68437]: _type = "Task" [ 1244.683821] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.684237] env[68437]: INFO nova.compute.manager [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Terminating instance [ 1244.697727] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945109, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.699668] env[68437]: DEBUG nova.policy [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7605d44a5b5448a3966872b4f524d13c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40d8becefc85431b9723c72aa09d152b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1244.968658] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Successfully created port: 505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1245.160756] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1245.191731] env[68437]: DEBUG nova.compute.manager [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1245.192082] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1245.192975] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba70d43-90f7-483d-9fd8-f020591d49de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.199159] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945109, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060482} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.199811] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1245.200634] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfdf73e-7ae8-44e0-8418-129a133e4e72 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.205575] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1245.206139] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-373abad7-02cd-436d-b29e-1a6515c3d6c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.227361] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] eb5c9d63-ac51-4cef-93c9-c15119ea2ea7/eb5c9d63-ac51-4cef-93c9-c15119ea2ea7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1245.229120] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb7d430-330d-4bcc-960b-e5f483ec2317 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.243342] env[68437]: DEBUG 
oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1245.243342] env[68437]: value = "task-2945110" [ 1245.243342] env[68437]: _type = "Task" [ 1245.243342] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.250297] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1245.250297] env[68437]: value = "task-2945111" [ 1245.250297] env[68437]: _type = "Task" [ 1245.250297] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.253298] env[68437]: DEBUG oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.261278] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945111, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.666469] env[68437]: INFO nova.virt.block_device [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Booting with volume d9bee34e-2dd4-4803-aafb-0bd871f3a561 at /dev/sda [ 1245.709047] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98ef6e74-3482-457d-810d-1c317542f1ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.717778] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af62306-6d84-479a-89f7-02cba0d6b665 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.759967] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef77f537-2e8b-482f-8316-0e777df017fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.766339] env[68437]: DEBUG oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945110, 'name': PowerOffVM_Task, 'duration_secs': 0.354154} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.769808] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.769988] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.770296] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945111, 'name': ReconfigVM_Task, 'duration_secs': 0.49402} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.770551] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9879b744-8dc1-4831-a788-586ba3f93548 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.771878] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Reconfigured VM instance instance-00000071 to attach disk [datastore2] eb5c9d63-ac51-4cef-93c9-c15119ea2ea7/eb5c9d63-ac51-4cef-93c9-c15119ea2ea7.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1245.774992] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7fb4d0-9c8a-4768-bee0-a9fb24fe6d97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.784709] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-558a3ffd-b068-41eb-bc0d-7f43605010ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.790629] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1245.790629] env[68437]: value = "task-2945113" [ 1245.790629] env[68437]: _type = "Task" [ 1245.790629] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.798158] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945113, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.807678] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dc59cc-6a60-4142-a6c1-e726fa69b218 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.813578] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117ff3f8-24bd-444e-8ee0-f18e667d351d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.827466] env[68437]: DEBUG nova.virt.block_device [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating existing volume attachment record: 2f333209-489f-4eb2-8d37-1100f929d8f4 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1245.864539] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1245.864823] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1245.864957] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleting the datastore file [datastore1] 8554a78c-c2d7-459d-a295-121da777dfd4 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1245.865250] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06c39a45-49a8-4cac-bb26-aafb492f035e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.871886] env[68437]: DEBUG oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1245.871886] env[68437]: value = "task-2945114" [ 1245.871886] env[68437]: _type = "Task" [ 1245.871886] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.880089] env[68437]: DEBUG oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945114, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.300734] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945113, 'name': Rename_Task, 'duration_secs': 0.165424} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.301091] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1246.301354] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5222a69a-5eab-455b-b31f-fee1577b4e0a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.308763] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1246.308763] env[68437]: value = "task-2945115" [ 1246.308763] env[68437]: _type = "Task" [ 1246.308763] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.317071] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945115, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.321070] env[68437]: DEBUG nova.compute.manager [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Received event network-vif-plugged-505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1246.321180] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.321358] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.321532] env[68437]: DEBUG oslo_concurrency.lockutils [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.321687] env[68437]: DEBUG nova.compute.manager [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] No waiting events found dispatching network-vif-plugged-505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1246.321852] env[68437]: WARNING nova.compute.manager [req-eb3699c5-1904-4f2f-b782-aa837c7df297 req-ac1aff8d-b43e-4682-b558-238857d1f538 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Received unexpected event network-vif-plugged-505ca14d-2a80-4b29-bd5c-9d991541e9ad for instance with vm_state building and task_state block_device_mapping. [ 1246.385826] env[68437]: DEBUG oslo_vmware.api [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.212271} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.386347] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1246.386536] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1246.386712] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1246.386881] env[68437]: INFO nova.compute.manager [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1246.387137] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1246.387329] env[68437]: DEBUG nova.compute.manager [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1246.387428] env[68437]: DEBUG nova.network.neutron [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1246.435071] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Successfully updated port: 505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1246.820580] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945115, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.935682] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.935876] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.936046] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1247.322208] env[68437]: DEBUG nova.network.neutron [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.323512] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945115, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.471290] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1247.606847] env[68437]: DEBUG nova.network.neutron [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.821917] env[68437]: DEBUG oslo_vmware.api [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945115, 'name': PowerOnVM_Task, 'duration_secs': 1.454771} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.822204] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1247.822406] env[68437]: INFO nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Took 7.76 seconds to spawn the instance on the hypervisor. [ 1247.822582] env[68437]: DEBUG nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1247.823379] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4c3bf4-d819-4397-a7d3-c7ecf196fcea {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.825986] env[68437]: INFO nova.compute.manager [-] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Took 1.44 seconds to deallocate network for instance. 
[ 1247.909601] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1247.910138] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1247.910369] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.910516] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.910700] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.910847] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.910995] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1247.911222] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1247.911379] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1247.911548] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1247.911711] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1247.911883] env[68437]: DEBUG nova.virt.hardware [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1247.912716] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344ee496-29cc-4efa-89f0-e4e05b7f35f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.921084] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27905653-d5cb-42f2-8058-48c456dffea4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.109555] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.110048] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Instance network_info: |[{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1248.110349] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:9d:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '505ca14d-2a80-4b29-bd5c-9d991541e9ad', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1248.117749] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1248.117960] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1248.118198] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0b4a3c7-45fb-4e7b-bb45-9d1c8b44c5d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.139043] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1248.139043] env[68437]: value = "task-2945116" [ 1248.139043] env[68437]: _type = "Task" [ 1248.139043] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.146456] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945116, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.335341] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.335778] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.336051] env[68437]: DEBUG nova.objects.instance [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'resources' on Instance uuid 8554a78c-c2d7-459d-a295-121da777dfd4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1248.343084] env[68437]: INFO nova.compute.manager [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Took 12.52 seconds to build instance. [ 1248.351300] env[68437]: DEBUG nova.compute.manager [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Received event network-changed-505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1248.351500] env[68437]: DEBUG nova.compute.manager [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Refreshing instance network info cache due to event network-changed-505ca14d-2a80-4b29-bd5c-9d991541e9ad. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1248.351732] env[68437]: DEBUG oslo_concurrency.lockutils [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.351936] env[68437]: DEBUG oslo_concurrency.lockutils [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.352154] env[68437]: DEBUG nova.network.neutron [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Refreshing network info cache for port 505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1248.649084] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945116, 'name': CreateVM_Task, 'duration_secs': 0.369243} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.649282] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1248.649953] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '2f333209-489f-4eb2-8d37-1100f929d8f4', 'device_type': None, 'mount_device': '/dev/sda', 'boot_index': 0, 'disk_bus': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591146', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'name': 'volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0a246b14-5078-4549-a270-73f99a1647c7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'serial': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561'}, 'delete_on_termination': True, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68437) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1248.650194] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Root volume attach. Driver type: vmdk {{(pid=68437) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1248.650979] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcda68b7-9a7c-46cf-8e5b-0c27797329f0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.659215] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f4ce57-6590-48b0-8858-b9d021aa061c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.665478] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b448a42d-269e-448d-85d2-bd3102788243 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.671794] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-f0d3bd2b-e155-4a12-bb10-e769ddd41226 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.679296] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1248.679296] env[68437]: value = "task-2945117" [ 1248.679296] env[68437]: _type = "Task" [ 1248.679296] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.687192] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945117, 'name': RelocateVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.845735] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8804ff43-5399-4015-a5e1-f768f355ced6 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.027s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.990391] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca946206-f4a7-428b-bc28-f0a2997b8ccc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.998466] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3bd978-3080-412d-bbf7-f8b120a0188e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.033226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091fd2dc-009e-4c87-be7a-253c34c67ce7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.041159] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bcf39f-3fe0-4543-8d6a-4214f4ed8c4f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.055129] env[68437]: DEBUG nova.compute.provider_tree [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1249.100115] env[68437]: DEBUG nova.network.neutron [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updated VIF entry in instance network info cache for port 505ca14d-2a80-4b29-bd5c-9d991541e9ad. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1249.100513] env[68437]: DEBUG nova.network.neutron [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.189266] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945117, 'name': RelocateVM_Task} progress is 20%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.288641] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.288955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.289192] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.289378] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.289548] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.291826] env[68437]: INFO nova.compute.manager [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Terminating instance [ 1249.585812] env[68437]: DEBUG nova.scheduler.client.report [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 148 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1249.586127] env[68437]: DEBUG nova.compute.provider_tree [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 
tempest-AttachVolumeNegativeTest-1237161712-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 148 to 149 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1249.586398] env[68437]: DEBUG nova.compute.provider_tree [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1249.603405] env[68437]: DEBUG oslo_concurrency.lockutils [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.603615] env[68437]: DEBUG nova.compute.manager [req-ce39ffdc-59bb-4dd9-b55b-2bfebdab962c req-be50b7ae-5166-488c-bbf9-8ea2df34d7b6 service nova] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Received event network-vif-deleted-9039c1ee-0c1c-4720-9e53-0b8b2f1a6b92 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1249.690128] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945117, 'name': RelocateVM_Task} progress is 20%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.795967] env[68437]: DEBUG nova.compute.manager [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1249.796265] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1249.797708] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29e96f6-531b-477f-8a13-5d20c1c43d25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.806932] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.807212] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0236ea27-3392-4cbc-8283-7966619baff0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.815850] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1249.815850] env[68437]: value = "task-2945118" [ 1249.815850] env[68437]: _type = "Task" [ 1249.815850] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.825456] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.091705] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.108802] env[68437]: INFO nova.scheduler.client.report [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted allocations for instance 8554a78c-c2d7-459d-a295-121da777dfd4 [ 1250.190125] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945117, 'name': RelocateVM_Task, 'duration_secs': 1.418317} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.190409] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1250.190632] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591146', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'name': 'volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0a246b14-5078-4549-a270-73f99a1647c7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'serial': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1250.191399] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f3bf3-8122-4947-a776-271c8a43312e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.206524] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73e0246-21af-43a2-9116-f7d2947d6f79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.231367] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561/volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1250.231625] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa3ed1a3-ac41-41c1-a2c0-64df6f385478 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.250962] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1250.250962] env[68437]: value = "task-2945119" [ 1250.250962] env[68437]: _type = "Task" [ 1250.250962] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.258330] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945119, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.326020] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945118, 'name': PowerOffVM_Task, 'duration_secs': 0.208765} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.326310] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1250.326482] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1250.326717] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-918579aa-6a99-4349-8a11-16b7f954ef38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.394914] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1250.395175] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1250.395358] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore2] eb5c9d63-ac51-4cef-93c9-c15119ea2ea7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1250.395664] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87ede021-3f81-414c-bf1a-043f6401c378 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.402608] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1250.402608] env[68437]: value = "task-2945121" [ 1250.402608] env[68437]: _type = "Task" [ 1250.402608] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.410856] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945121, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.616030] env[68437]: DEBUG oslo_concurrency.lockutils [None req-471d0053-d5d9-4451-ae49-c6a5b349b967 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "8554a78c-c2d7-459d-a295-121da777dfd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.934s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.761506] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945119, 'name': ReconfigVM_Task, 'duration_secs': 0.3008} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.761737] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561/volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1250.766445] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c8e2079-1fcf-4a7b-9b14-2462733cfb1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.781202] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1250.781202] env[68437]: value = "task-2945122" [ 1250.781202] env[68437]: _type = "Task" [ 1250.781202] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.789313] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945122, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.915442] env[68437]: DEBUG oslo_vmware.api [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945121, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266671} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.915742] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.915931] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1250.916122] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1250.916302] env[68437]: INFO nova.compute.manager [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1250.916553] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1250.916753] env[68437]: DEBUG nova.compute.manager [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1250.916848] env[68437]: DEBUG nova.network.neutron [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1251.218249] env[68437]: DEBUG nova.compute.manager [req-bac41304-d322-4d76-a33c-3c8baea1cba0 req-f5d887a4-3c52-4b08-a2ba-810f601bac82 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Received event network-vif-deleted-087445ab-aaa6-4608-8412-adbe25287f5e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1251.218624] env[68437]: INFO nova.compute.manager [req-bac41304-d322-4d76-a33c-3c8baea1cba0 req-f5d887a4-3c52-4b08-a2ba-810f601bac82 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Neutron deleted interface 087445ab-aaa6-4608-8412-adbe25287f5e; detaching it from the instance and deleting it from the info cache [ 1251.218624] env[68437]: DEBUG nova.network.neutron [req-bac41304-d322-4d76-a33c-3c8baea1cba0 req-f5d887a4-3c52-4b08-a2ba-810f601bac82 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.292399] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945122, 
'name': ReconfigVM_Task, 'duration_secs': 0.146251} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.292773] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591146', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'name': 'volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0a246b14-5078-4549-a270-73f99a1647c7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'serial': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1251.293359] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e17ce3cd-c29a-493c-8fbc-26baa4012f9c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.300870] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1251.300870] env[68437]: value = "task-2945123" [ 1251.300870] env[68437]: _type = "Task" [ 1251.300870] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.309710] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945123, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.698841] env[68437]: DEBUG nova.network.neutron [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.721662] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-211c7d0c-069e-47a3-9984-8b50237b6002 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.732158] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b00848-7143-4f9b-8e20-38909b39677e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.765902] env[68437]: DEBUG nova.compute.manager [req-bac41304-d322-4d76-a33c-3c8baea1cba0 req-f5d887a4-3c52-4b08-a2ba-810f601bac82 service nova] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Detach interface failed, port_id=087445ab-aaa6-4608-8412-adbe25287f5e, reason: Instance eb5c9d63-ac51-4cef-93c9-c15119ea2ea7 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1251.810032] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945123, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.201734] env[68437]: INFO nova.compute.manager [-] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Took 1.28 seconds to deallocate network for instance. [ 1252.311139] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945123, 'name': Rename_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.707797] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.708127] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.708402] env[68437]: DEBUG nova.objects.instance [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid eb5c9d63-ac51-4cef-93c9-c15119ea2ea7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.812089] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945123, 'name': Rename_Task, 'duration_secs': 1.136896} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.812417] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1252.812687] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab7a4a9f-a0cb-41ca-8f6d-6ebd96476f23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.820637] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1252.820637] env[68437]: value = "task-2945124" [ 1252.820637] env[68437]: _type = "Task" [ 1252.820637] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.828697] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.893832] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.894104] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1253.332978] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945124, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.362540] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee25d8b0-914a-46f2-9fa7-4572749820a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.370906] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfd2a3e-c82f-46a8-9ef6-f78ce14287e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.404415] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1253.408170] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82907b85-0d43-4652-81db-eda3ae5563b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.418477] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d811968b-fe3a-4c8e-af91-cde9bd8d90e9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.434857] env[68437]: DEBUG nova.compute.provider_tree [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.833942] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945124, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.929719] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1253.937970] env[68437]: DEBUG nova.scheduler.client.report [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1254.338211] env[68437]: DEBUG oslo_vmware.api [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945124, 'name': PowerOnVM_Task, 'duration_secs': 1.30488} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.338623] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1254.338940] env[68437]: INFO nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Took 6.43 seconds to spawn the instance on the hypervisor. [ 1254.339245] env[68437]: DEBUG nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1254.340438] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab4753c-8fa4-4993-9641-077e1317ec71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.443070] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.446079] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.516s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.447453] env[68437]: INFO nova.compute.claims [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1254.466360] env[68437]: INFO nova.scheduler.client.report [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance eb5c9d63-ac51-4cef-93c9-c15119ea2ea7 [ 1254.861084] env[68437]: INFO nova.compute.manager [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Took 13.76 seconds to build instance. 
[ 1254.973363] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8c405a7c-fa87-45a3-a66e-197085fadd58 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "eb5c9d63-ac51-4cef-93c9-c15119ea2ea7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.684s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.363698] env[68437]: DEBUG oslo_concurrency.lockutils [None req-03f8680f-c72c-4086-bd46-ca0bea62ad56 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.273s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.593558] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f8f2f3-0648-4471-8fd7-08422d5ce9c0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.602986] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd976bb1-2e72-4243-b222-b5f0474a300a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.637742] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186870cd-4eeb-42d3-a86d-a6c61cf26b75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.646147] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390fe492-34b6-4613-9e19-dc36439ed75b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.660364] env[68437]: DEBUG nova.compute.provider_tree [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.664746] env[68437]: DEBUG nova.compute.manager [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1255.664930] env[68437]: DEBUG nova.compute.manager [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing instance network info cache due to event network-changed-28d9f2cf-baaf-4817-acdb-525b41381e45. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1255.665153] env[68437]: DEBUG oslo_concurrency.lockutils [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] Acquiring lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.665293] env[68437]: DEBUG oslo_concurrency.lockutils [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] Acquired lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.665459] env[68437]: DEBUG nova.network.neutron [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Refreshing network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1256.163626] env[68437]: DEBUG nova.scheduler.client.report [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.360881] env[68437]: DEBUG nova.network.neutron [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updated VIF entry in instance network info cache for port 28d9f2cf-baaf-4817-acdb-525b41381e45. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1256.361250] env[68437]: DEBUG nova.network.neutron [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [{"id": "28d9f2cf-baaf-4817-acdb-525b41381e45", "address": "fa:16:3e:78:2e:6c", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28d9f2cf-ba", "ovs_interfaceid": "28d9f2cf-baaf-4817-acdb-525b41381e45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.362583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.362738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.668505] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.669141] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1256.864723] env[68437]: DEBUG oslo_concurrency.lockutils [req-6427a8c0-79db-4dfa-927c-7ada66dc1794 req-ef6e9261-a9a9-4ef5-8887-49a48a56a786 service nova] Releasing lock "refresh_cache-353ebb37-7e69-49d4-873e-2272cbfff6e8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.865260] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1257.056540] env[68437]: DEBUG nova.compute.manager [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1257.174656] env[68437]: DEBUG nova.compute.utils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1257.175965] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1257.176155] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1257.211387] env[68437]: DEBUG nova.policy [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6801cab23bf4aadb8d7f326f0643c32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73e8290afeb84bf3976cfa22d3452ca7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1257.389344] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.389639] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.391123] env[68437]: INFO nova.compute.claims [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1257.473738] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Successfully created port: e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1257.574776] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.680082] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1257.699564] env[68437]: DEBUG nova.compute.manager [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Received event network-changed-505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1257.699769] env[68437]: DEBUG nova.compute.manager [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Refreshing instance network info cache due to event network-changed-505ca14d-2a80-4b29-bd5c-9d991541e9ad. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1257.699986] env[68437]: DEBUG oslo_concurrency.lockutils [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.700134] env[68437]: DEBUG oslo_concurrency.lockutils [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1257.700300] env[68437]: DEBUG nova.network.neutron [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Refreshing network info cache for port 505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1258.424190] env[68437]: DEBUG nova.network.neutron [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updated VIF entry in instance network info cache for port 505ca14d-2a80-4b29-bd5c-9d991541e9ad. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1258.424736] env[68437]: DEBUG nova.network.neutron [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.526738] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51677178-ef3e-433d-a33b-3b2c50708e99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.535880] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fc91d4-3b35-4eb2-ad4d-c347918a5fa0 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.564862] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4749c1-7c45-47dc-b79f-734693b7c818 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.571791] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbba457-3c71-4726-9318-306d73401265 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.585250] env[68437]: DEBUG nova.compute.provider_tree [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.690080] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1258.717342] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1258.717622] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1258.717785] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1258.717984] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1258.718139] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 
tempest-AttachVolumeNegativeTest-1237161712-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1258.718289] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1258.718537] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1258.718704] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1258.718875] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1258.719049] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1258.719253] env[68437]: DEBUG nova.virt.hardware [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1258.720213] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a3f9ba-8deb-4e78-a118-c7fbc9501adb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.729976] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7ff893-33f7-4b7c-8d0c-efbd94b47896 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.838439] env[68437]: DEBUG nova.compute.manager [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Received event network-vif-plugged-e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1258.838666] env[68437]: DEBUG oslo_concurrency.lockutils [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.838871] env[68437]: DEBUG oslo_concurrency.lockutils [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] Lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.839051] env[68437]: DEBUG oslo_concurrency.lockutils [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] Lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.839225] env[68437]: DEBUG nova.compute.manager [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] No waiting events found dispatching network-vif-plugged-e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1258.839387] env[68437]: WARNING nova.compute.manager [req-e6d07b88-9786-46fe-a0d2-ce75b3524a72 req-659041b5-2c45-4d85-952d-659a88d69ca7 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Received unexpected event network-vif-plugged-e681ca7d-a952-4802-bfde-864f7a8362b2 for instance with vm_state building and task_state spawning. [ 1258.928783] env[68437]: DEBUG oslo_concurrency.lockutils [req-496d3b0d-fdd0-4fae-baeb-b7369fd24f00 req-04d7083a-b308-429e-9dd9-7d1a1e6d9caa service nova] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1258.933487] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Successfully updated port: e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1259.088778] env[68437]: DEBUG nova.scheduler.client.report [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.444250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.444535] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.444587] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1259.593183] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.593801] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1259.596788] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.023s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.005349] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1260.100355] env[68437]: DEBUG nova.compute.utils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1260.103677] env[68437]: INFO nova.compute.claims [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.109507] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1260.109675] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1260.165012] env[68437]: DEBUG nova.network.neutron [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating instance_info_cache with network_info: [{"id": "e681ca7d-a952-4802-bfde-864f7a8362b2", "address": "fa:16:3e:a5:4c:91", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681ca7d-a9", "ovs_interfaceid": "e681ca7d-a952-4802-bfde-864f7a8362b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.170375] env[68437]: DEBUG nova.policy [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f745cbd7edb641af8623447b00021ac6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4b7dfebb79e54e4fba7e0b142f99d7eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1260.419734] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Successfully created port: a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1260.608731] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1260.612863] env[68437]: INFO nova.compute.resource_tracker [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating resource usage from migration 28bc1daf-99ee-4cc6-9ea9-7809d0333e47 [ 1260.667693] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.668057] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Instance network_info: |[{"id": "e681ca7d-a952-4802-bfde-864f7a8362b2", "address": "fa:16:3e:a5:4c:91", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681ca7d-a9", "ovs_interfaceid": "e681ca7d-a952-4802-bfde-864f7a8362b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1260.668467] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:4c:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e681ca7d-a952-4802-bfde-864f7a8362b2', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1260.676073] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1260.678727] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1260.679140] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-098b32cc-4676-4c59-9675-d9cceec03874 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.702223] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1260.702223] env[68437]: value = "task-2945125" [ 1260.702223] env[68437]: _type = "Task" [ 1260.702223] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.712864] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945125, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.854765] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b58544-711d-4ac3-9c17-14f1ac0a0927 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.862657] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c6143f-9307-4d9c-89d5-0ffdf669f887 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.869049] env[68437]: DEBUG nova.compute.manager [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Received event network-changed-e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1260.869248] env[68437]: DEBUG nova.compute.manager [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Refreshing instance network info cache due to event network-changed-e681ca7d-a952-4802-bfde-864f7a8362b2. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1260.869456] env[68437]: DEBUG oslo_concurrency.lockutils [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] Acquiring lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.869596] env[68437]: DEBUG oslo_concurrency.lockutils [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] Acquired lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.869755] env[68437]: DEBUG nova.network.neutron [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Refreshing network info cache for port e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1260.905022] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562721e7-7006-4b13-bfdd-f60773574604 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.913812] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb0f144-289a-42b8-9189-08c59a4ffacb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.930792] env[68437]: DEBUG nova.compute.provider_tree [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.212584] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945125, 'name': CreateVM_Task, 'duration_secs': 0.357901} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.212752] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1261.213467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.213633] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.213943] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1261.214205] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee934785-40e5-495e-b503-13a8dc6d5244 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.218655] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1261.218655] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f1557e-f1f5-9e39-09bf-dfc531fa2678" [ 1261.218655] env[68437]: _type = "Task" [ 1261.218655] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.227726] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f1557e-f1f5-9e39-09bf-dfc531fa2678, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.436475] env[68437]: DEBUG nova.scheduler.client.report [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.558131] env[68437]: DEBUG nova.network.neutron [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updated VIF entry in instance network info cache for port e681ca7d-a952-4802-bfde-864f7a8362b2. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1261.558526] env[68437]: DEBUG nova.network.neutron [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating instance_info_cache with network_info: [{"id": "e681ca7d-a952-4802-bfde-864f7a8362b2", "address": "fa:16:3e:a5:4c:91", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681ca7d-a9", "ovs_interfaceid": "e681ca7d-a952-4802-bfde-864f7a8362b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.618426] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1261.645339] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.645585] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.645775] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.645959] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.646117] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.646264] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.646472] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.646630] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.646795] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 
tempest-ServersTestJSON-973830923-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.646958] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.647145] env[68437]: DEBUG nova.virt.hardware [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.648011] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36d6f95-a90b-462e-ade0-2ccf8b088a1a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.656215] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22ae188-3c90-4a9d-9fe9-a15ab780e803 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.729826] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f1557e-f1f5-9e39-09bf-dfc531fa2678, 'name': SearchDatastore_Task, 'duration_secs': 0.012902} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.729964] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.730176] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1261.730410] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.730554] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.730729] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1261.730981] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-830ca2d3-f79b-4f27-bdbe-988fb34888fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.739813] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1261.739979] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1261.740667] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3198a1c-90a8-4ba2-a70a-588e3bd66f68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.745991] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1261.745991] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5299c0c0-710f-0a41-39ac-19d36787c74f" [ 1261.745991] env[68437]: _type = "Task" [ 1261.745991] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.754220] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5299c0c0-710f-0a41-39ac-19d36787c74f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.839278] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Successfully updated port: a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1261.941456] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.344s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.941696] env[68437]: INFO nova.compute.manager [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Migrating [ 1262.061374] env[68437]: DEBUG oslo_concurrency.lockutils [req-94672489-5140-40f8-8f6c-a6f7e6a710b1 req-b3c8e95f-4e6d-4d8b-9d4a-28af91157500 service nova] Releasing lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.258273] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5299c0c0-710f-0a41-39ac-19d36787c74f, 'name': SearchDatastore_Task, 'duration_secs': 0.031169} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.259079] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2488cc00-50bb-4be5-9e70-a5c0e9073435 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.265089] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1262.265089] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5acaf-90c0-9b2f-33c5-3d1a0f22a7ea" [ 1262.265089] env[68437]: _type = "Task" [ 1262.265089] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.272714] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5acaf-90c0-9b2f-33c5-3d1a0f22a7ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.343541] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.343649] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.343798] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1262.455871] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.456058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.456623] env[68437]: DEBUG nova.network.neutron [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1262.683771] env[68437]: DEBUG oslo_concurrency.lockutils [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.684144] env[68437]: DEBUG oslo_concurrency.lockutils [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.776065] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d5acaf-90c0-9b2f-33c5-3d1a0f22a7ea, 'name': 
SearchDatastore_Task, 'duration_secs': 0.057737} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.776266] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1262.776545] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 7705f1c5-3b96-426c-9553-b67f2951825b/7705f1c5-3b96-426c-9553-b67f2951825b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1262.776798] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a81d1cb1-c0cf-47b8-b6e4-df9ebdfb7903 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.783466] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1262.783466] env[68437]: value = "task-2945126" [ 1262.783466] env[68437]: _type = "Task" [ 1262.783466] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.791405] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.890838] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1262.901850] env[68437]: DEBUG nova.compute.manager [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Received event network-vif-plugged-a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1262.902250] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.902378] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.902478] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.902625] env[68437]: DEBUG nova.compute.manager [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] No waiting events found dispatching network-vif-plugged-a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1262.902790] env[68437]: WARNING nova.compute.manager [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Received unexpected event network-vif-plugged-a944b19d-2c14-4969-9dfb-c1003e5c743b for instance with vm_state building and task_state spawning. [ 1262.902948] env[68437]: DEBUG nova.compute.manager [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Received event network-changed-a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1262.903180] env[68437]: DEBUG nova.compute.manager [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Refreshing instance network info cache due to event network-changed-a944b19d-2c14-4969-9dfb-c1003e5c743b. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1262.903421] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Acquiring lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.186849] env[68437]: INFO nova.compute.manager [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Detaching volume a6d824b6-bc69-4e16-83a3-57fdea03f8a2 [ 1263.230135] env[68437]: INFO nova.virt.block_device [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Attempting to driver detach volume a6d824b6-bc69-4e16-83a3-57fdea03f8a2 from mountpoint /dev/sdb [ 1263.230437] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1263.230651] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591140', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'name': 'volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'aff861ed-e792-480a-811e-c157c0606d08', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'serial': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1263.235025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba7d148-737f-4769-a596-dd045fe3c767 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.254979] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16162a88-d4a1-4ea1-b950-713f3601ad98 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.263436] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee92be2b-07ac-4a24-ae69-2aa55153291a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.289293] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c90d18a-d708-43c3-8e88-6a885e6047d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.298140] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 
tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945126, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.308661] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] The volume has not been displaced from its original location: [datastore2] volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2/volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2.vmdk. No consolidation needed. {{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1263.313883] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1263.316604] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fa0421-7191-41f1-9d1e-b18d5c2100a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.329675] env[68437]: DEBUG nova.network.neutron [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Updating instance_info_cache with network_info: [{"id": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "address": "fa:16:3e:63:7c:25", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa944b19d-2c", "ovs_interfaceid": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.337811] env[68437]: DEBUG oslo_vmware.api [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1263.337811] env[68437]: value = "task-2945127" [ 1263.337811] env[68437]: _type = "Task" [ 1263.337811] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.348798] env[68437]: DEBUG oslo_vmware.api [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945127, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.469786] env[68437]: DEBUG nova.network.neutron [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.795693] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540458} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.796057] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 7705f1c5-3b96-426c-9553-b67f2951825b/7705f1c5-3b96-426c-9553-b67f2951825b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1263.796202] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1263.796449] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8fea7e3-f062-4dfe-b95b-b795f66353f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.802776] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1263.802776] env[68437]: value = "task-2945128" [ 1263.802776] env[68437]: _type = "Task" [ 1263.802776] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.811425] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945128, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.832011] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.832330] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Instance network_info: |[{"id": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "address": "fa:16:3e:63:7c:25", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa944b19d-2c", "ovs_interfaceid": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1263.832614] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Acquired lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1263.832791] env[68437]: DEBUG nova.network.neutron [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Refreshing network info cache for port a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1263.834010] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:7c:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a944b19d-2c14-4969-9dfb-c1003e5c743b', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1263.842391] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.843485] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1263.846474] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e6cc818-95d2-4d2a-b7c5-cdd6d2db8698 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.868144] env[68437]: DEBUG oslo_vmware.api [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945127, 'name': ReconfigVM_Task, 'duration_secs': 0.260397} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.869517] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1263.873947] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1263.873947] env[68437]: value = "task-2945129" [ 1263.873947] env[68437]: _type = "Task" [ 1263.873947] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.874190] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10df1629-5a7b-485f-871e-580239347a6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.895984] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945129, 'name': CreateVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.896994] env[68437]: DEBUG oslo_vmware.api [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1263.896994] env[68437]: value = "task-2945130" [ 1263.896994] env[68437]: _type = "Task" [ 1263.896994] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.972535] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.312734] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086402} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.313072] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1264.313906] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4b1c46-b766-4897-9503-8584ad129d23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.335129] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 7705f1c5-3b96-426c-9553-b67f2951825b/7705f1c5-3b96-426c-9553-b67f2951825b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1264.335385] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d414aac-99fd-4a72-9442-ad44fee9b187 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.356589] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1264.356589] env[68437]: value = "task-2945131" [ 1264.356589] env[68437]: _type = "Task" [ 1264.356589] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.364313] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945131, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.395075] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945129, 'name': CreateVM_Task, 'duration_secs': 0.421776} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.395259] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1264.395906] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.396084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.396395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1264.396644] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f1a1b36-86c9-42ff-838d-1a552f78c6f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.403500] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1264.403500] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c4f756-a8a6-7d59-ac3f-f8c56a9e8c33" [ 1264.403500] env[68437]: _type = "Task" [ 1264.403500] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.407243] env[68437]: DEBUG oslo_vmware.api [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945130, 'name': ReconfigVM_Task, 'duration_secs': 0.149775} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.410257] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591140', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'name': 'volume-a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'aff861ed-e792-480a-811e-c157c0606d08', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2', 'serial': 'a6d824b6-bc69-4e16-83a3-57fdea03f8a2'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1264.419969] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c4f756-a8a6-7d59-ac3f-f8c56a9e8c33, 'name': SearchDatastore_Task, 'duration_secs': 0.01125} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.420281] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1264.420535] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1264.420774] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.420919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.421108] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1264.421410] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-f6907e63-fdb5-4609-996b-580f7bcde20f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.429315] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1264.429496] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1264.432150] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c7691b7-7b4b-482b-a562-768c94758744 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.437724] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1264.437724] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e1817d-56ef-61c2-1d5f-3d7926979eb1" [ 1264.437724] env[68437]: _type = "Task" [ 1264.437724] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.444759] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e1817d-56ef-61c2-1d5f-3d7926979eb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.544976] env[68437]: DEBUG nova.network.neutron [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Updated VIF entry in instance network info cache for port a944b19d-2c14-4969-9dfb-c1003e5c743b. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1264.545355] env[68437]: DEBUG nova.network.neutron [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Updating instance_info_cache with network_info: [{"id": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "address": "fa:16:3e:63:7c:25", "network": {"id": "602213fe-0678-4681-9d26-38c4bd330129", "bridge": "br-int", "label": "tempest-ServersTestJSON-1160405042-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4b7dfebb79e54e4fba7e0b142f99d7eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa944b19d-2c", "ovs_interfaceid": "a944b19d-2c14-4969-9dfb-c1003e5c743b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.853559] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.865903] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945131, 'name': ReconfigVM_Task, 'duration_secs': 0.291886} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.866165] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 7705f1c5-3b96-426c-9553-b67f2951825b/7705f1c5-3b96-426c-9553-b67f2951825b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1264.866743] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3993fd39-8bfc-4dd2-a447-f65f87579816 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.873260] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1264.873260] env[68437]: value = "task-2945132" [ 1264.873260] env[68437]: _type = "Task" [ 1264.873260] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.883865] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945132, 'name': Rename_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.948239] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e1817d-56ef-61c2-1d5f-3d7926979eb1, 'name': SearchDatastore_Task, 'duration_secs': 0.008988} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.949123] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e451d98b-056d-467e-8e59-b5540c6d7331 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.953378] env[68437]: DEBUG nova.objects.instance [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'flavor' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.955865] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1264.955865] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5297fd47-f54b-679c-3fc5-3d73e4340e78" [ 1264.955865] env[68437]: _type = "Task" [ 1264.955865] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.964634] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297fd47-f54b-679c-3fc5-3d73e4340e78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.047912] env[68437]: DEBUG oslo_concurrency.lockutils [req-03a9cc92-c475-4cd5-a4a1-6ba931c0b2bf req-3177fbe5-3e89-4307-a7cb-c82348726d33 service nova] Releasing lock "refresh_cache-2ac0c165-a898-4d23-a346-2567921caf1b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.358481] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Getting list of instances from cluster (obj){ [ 1265.358481] env[68437]: value = "domain-c8" [ 1265.358481] env[68437]: _type = "ClusterComputeResource" [ 1265.358481] env[68437]: } {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1265.359284] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6c49eb-459f-493b-a2a3-242677675836 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.378567] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Got total of 10 instances {{(pid=68437) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1265.378761] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 353ebb37-7e69-49d4-873e-2272cbfff6e8 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.378948] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid e2143e07-8c8d-4008-bb73-29aae91baee7 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379120] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 191b441c-2c9f-48f9-b83a-d539722e6375 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379275] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379427] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid e81e633d-34a6-443d-a2fe-95e6d8afa552 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379578] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid ede62837-4ff5-44be-a015-9ea06b9126a5 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379722] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 524c1b76-3563-482d-a676-26fa6c28a3c7 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.379951] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 0a246b14-5078-4549-a270-73f99a1647c7 {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.380128] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 7705f1c5-3b96-426c-9553-b67f2951825b {{(pid=68437) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.380275] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Triggering sync for uuid 2ac0c165-a898-4d23-a346-2567921caf1b {{(pid=68437) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1265.383254] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.383499] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.383754] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "e2143e07-8c8d-4008-bb73-29aae91baee7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.383939] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.384178] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "191b441c-2c9f-48f9-b83a-d539722e6375" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.384355] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.384597] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.384801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.384973] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" 
acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.385213] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "ede62837-4ff5-44be-a015-9ea06b9126a5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.385389] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.385772] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.385954] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.386189] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.386370] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "0a246b14-5078-4549-a270-73f99a1647c7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.386525] env[68437]: INFO nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] During sync_power_state the instance has a pending task (resize_migrating). Skip. 
[ 1265.386699] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "0a246b14-5078-4549-a270-73f99a1647c7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.386854] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.387067] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.387827] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1b4c78-7756-4bb1-a678-fd3aaddab0cc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.391021] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8fc66e-1a4d-4ed5-9cfb-4d132d5bad1e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.393095] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff649163-67ea-4f47-a1e6-27950ef9d94a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.395641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249a293f-7b23-4b1b-84f7-2842da738a37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.398239] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6394b1ff-156a-4f09-9c38-2ff7f2f72e14 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.400730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98721b50-72ba-47b5-99da-57229d043408 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.411284] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945132, 'name': Rename_Task, 'duration_secs': 0.161379} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.418832] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1265.424033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15938abb-70c8-4d21-8038-547c1bca3d55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.432062] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1265.432062] env[68437]: value = "task-2945133" [ 1265.432062] env[68437]: _type = "Task" [ 1265.432062] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.439680] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.472212] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5297fd47-f54b-679c-3fc5-3d73e4340e78, 'name': SearchDatastore_Task, 'duration_secs': 0.011528} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.472442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.472703] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2ac0c165-a898-4d23-a346-2567921caf1b/2ac0c165-a898-4d23-a346-2567921caf1b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1265.473025] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-142d5905-50c1-4906-98aa-deb248956bdf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.480717] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1265.480717] env[68437]: value = "task-2945134" [ 1265.480717] env[68437]: _type = "Task" [ 1265.480717] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.487041] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cfb3d6-b7ea-482e-bce6-d2b01fae970c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.492822] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945134, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.508232] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1265.924371] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.539s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.924892] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.925215] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.925571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.925891] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.929423] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.942293] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945133, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.966062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-529fc81c-d6bf-4a96-af12-8115f9d35ffa tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.282s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.967220] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.583s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.968348] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adb107b-abcf-4db2-8213-770e868f4dbf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.990790] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.991047] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2ac0c165-a898-4d23-a346-2567921caf1b/2ac0c165-a898-4d23-a346-2567921caf1b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1265.991288] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1265.991559] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-855d25ae-eda2-4acf-90ef-a5eae4692214 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.006957] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1266.006957] env[68437]: value = "task-2945135" [ 1266.006957] env[68437]: _type = "Task" [ 1266.006957] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.013451] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.016488] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a379ddcc-bbfa-40d0-8d77-6b5ff1871bb1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.018188] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945135, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.022367] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1266.022367] env[68437]: value = "task-2945136" [ 1266.022367] env[68437]: _type = "Task" [ 1266.022367] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.030079] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.448229] env[68437]: DEBUG oslo_vmware.api [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945133, 'name': PowerOnVM_Task, 'duration_secs': 0.512036} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.448521] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1266.449208] env[68437]: INFO nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Took 7.76 seconds to spawn the instance on the hypervisor. 
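The Task: {'id': task-29451xx, ...} lines above all follow the same lifecycle: an asynchronous vCenter task is invoked (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ...), and oslo_vmware's wait_for_task then polls it (the _poll_task "progress is N%" lines) until it reports "completed successfully". Below is a self-contained, simplified stand-in for that polling loop; FakeTask and the dict it returns are invented for illustration and do not mirror oslo.vmware's real task objects.

import time

class FakeTask:
    """Illustrative stand-in for a vSphere task; finishes after a few polls."""
    def __init__(self, name):
        self.name = name
        self._polls = 0

    def info(self):
        self._polls += 1
        done = self._polls >= 3
        return {"state": "success" if done else "running",
                "progress": 100 if done else 0}

def wait_for_task(task, poll_interval=0.5):
    # Poll until the task succeeds or errors, mirroring the
    # "progress is N%" / "completed successfully" messages in the log.
    while True:
        info = task.info()
        if info["state"] == "success":
            print(f"Task {task.name} completed successfully.")
            return
        if info["state"] == "error":
            raise RuntimeError(f"Task {task.name} failed")
        print(f"Task {task.name} progress is {info['progress']}%.")
        time.sleep(poll_interval)

wait_for_task(FakeTask("PowerOnVM_Task"))

In oslo.vmware itself the polling reads the task's info property through the vSphere PropertyCollector, which is likely why the log interleaves RetrievePropertiesEx calls with the progress lines.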
[ 1266.449208] env[68437]: DEBUG nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1266.449683] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00e6f95-caf6-4c69-88a7-410c043bc155 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.477812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.517509] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945135, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07351} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.517798] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1266.518666] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80471302-d4aa-4588-87d3-8ea5ba637257 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.540848] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 2ac0c165-a898-4d23-a346-2567921caf1b/2ac0c165-a898-4d23-a346-2567921caf1b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1266.543767] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-973f9167-ce89-4752-b1f3-9eb707eb00e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.562245] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945136, 'name': PowerOffVM_Task, 'duration_secs': 0.415376} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.563502] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1266.563701] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1266.567099] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1266.567099] env[68437]: value = "task-2945137" [ 1266.567099] env[68437]: _type = "Task" [ 1266.567099] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.575210] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945137, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.764509] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.944279] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.944549] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.944789] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "aff861ed-e792-480a-811e-c157c0606d08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.945579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 
tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.945579] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.947192] env[68437]: INFO nova.compute.manager [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Terminating instance [ 1266.970118] env[68437]: INFO nova.compute.manager [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Took 13.06 seconds to build instance. [ 1267.070131] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1267.070427] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1267.070540] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1267.070741] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1267.070888] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1267.071043] env[68437]: DEBUG nova.virt.hardware [None 
req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1267.071257] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1267.071447] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1267.071637] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1267.071825] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1267.072065] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1267.077239] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9260576a-b036-40d4-a845-1b8bbaed5b43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.097356] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.098571] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1267.098571] env[68437]: value = "task-2945138" [ 1267.098571] env[68437]: _type = "Task" [ 1267.098571] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.106077] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945138, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.230555] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.230771] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1267.451528] env[68437]: DEBUG nova.compute.manager [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1267.451768] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1267.452694] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbba6963-3634-4365-927f-f2c9dba08d4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.460292] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1267.460520] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8aac8f5b-ac02-41f7-9bac-da4114ae308e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.467346] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1267.467346] env[68437]: value = "task-2945139" [ 1267.467346] env[68437]: _type = "Task" [ 1267.467346] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.471121] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e4cd39ba-26cc-4b77-a46e-4fdcfdd3eee3 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.577s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.471380] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.084s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.471590] env[68437]: INFO nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1267.471776] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.475066] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945139, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.589105] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945137, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.608040] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945138, 'name': ReconfigVM_Task, 'duration_secs': 0.371487} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.608340] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1267.977812] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945139, 'name': PowerOffVM_Task, 'duration_secs': 0.227373} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.978145] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.979029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.979029] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04c1e8e8-b99f-4f55-8347-039d40255b95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.048730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1268.048730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1268.048730] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore1] aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1268.048730] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-148e4ce0-a9f7-428d-9a74-ff8f9d3b4d01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.054463] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1268.054463] env[68437]: value = "task-2945141" [ 1268.054463] env[68437]: _type = "Task" [ 1268.054463] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.062259] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945141, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.086709] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945137, 'name': ReconfigVM_Task, 'duration_secs': 1.138223} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.087014] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 2ac0c165-a898-4d23-a346-2567921caf1b/2ac0c165-a898-4d23-a346-2567921caf1b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1268.087754] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3dc8acc0-ace0-4f96-9b5d-9556dc6cd293 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.094244] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1268.094244] env[68437]: value = "task-2945142" [ 1268.094244] env[68437]: _type = "Task" [ 1268.094244] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.106300] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945142, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.114746] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1268.114975] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1268.115154] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1268.115333] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1268.115483] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1268.115623] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1268.115846] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1268.116252] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1268.116555] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Got 1 possible topologies 
{{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1268.116892] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1268.117188] env[68437]: DEBUG nova.virt.hardware [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1268.124089] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1268.124422] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cf0b7944-3144-476d-88cb-6a02710b718e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.142594] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1268.142594] env[68437]: value = "task-2945143" [ 1268.142594] env[68437]: _type = "Task" [ 1268.142594] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.150400] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945143, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.197370] env[68437]: DEBUG nova.compute.manager [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Received event network-changed-e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1268.197484] env[68437]: DEBUG nova.compute.manager [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Refreshing instance network info cache due to event network-changed-e681ca7d-a952-4802-bfde-864f7a8362b2. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1268.197704] env[68437]: DEBUG oslo_concurrency.lockutils [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] Acquiring lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.197936] env[68437]: DEBUG oslo_concurrency.lockutils [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] Acquired lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1268.197997] env[68437]: DEBUG nova.network.neutron [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Refreshing network info cache for port e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1268.231221] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.231746] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.231925] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.563994] env[68437]: DEBUG oslo_vmware.api [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156634} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.564269] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1268.564453] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1268.564647] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1268.564819] env[68437]: INFO nova.compute.manager [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: aff861ed-e792-480a-811e-c157c0606d08] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1268.565066] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1268.565262] env[68437]: DEBUG nova.compute.manager [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1268.565355] env[68437]: DEBUG nova.network.neutron [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1268.606946] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945142, 'name': Rename_Task, 'duration_secs': 0.15472} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.606946] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1268.607194] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fac46e2-a1d3-432e-aafe-c8b31055def0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.613731] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1268.613731] env[68437]: value = "task-2945144" [ 1268.613731] env[68437]: _type = "Task" [ 1268.613731] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.621475] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.653724] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945143, 'name': ReconfigVM_Task, 'duration_secs': 0.155663} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.654078] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1268.654990] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8ca7ea-840c-48de-b7b1-4408114de565 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.679335] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561/volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1268.679700] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dd63073-4229-4ee7-8f97-f9dfbe8d58b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.704950] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1268.704950] env[68437]: value = "task-2945145" [ 1268.704950] env[68437]: _type = "Task" [ 1268.704950] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.716503] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945145, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.069868] env[68437]: DEBUG nova.network.neutron [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updated VIF entry in instance network info cache for port e681ca7d-a952-4802-bfde-864f7a8362b2. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1269.070268] env[68437]: DEBUG nova.network.neutron [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating instance_info_cache with network_info: [{"id": "e681ca7d-a952-4802-bfde-864f7a8362b2", "address": "fa:16:3e:a5:4c:91", "network": {"id": "c6317aee-8097-4060-bdaa-a994bd073fa8", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1141805679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "73e8290afeb84bf3976cfa22d3452ca7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681ca7d-a9", "ovs_interfaceid": "e681ca7d-a952-4802-bfde-864f7a8362b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.128965] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945144, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.215722] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945145, 'name': ReconfigVM_Task, 'duration_secs': 0.358565} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.216007] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561/volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1269.216274] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1269.573516] env[68437]: DEBUG oslo_concurrency.lockutils [req-81b28b59-f891-4757-bdbd-c44d2a9b635a req-22892395-d9a9-414c-8f0c-770552fbf6bf service nova] Releasing lock "refresh_cache-7705f1c5-3b96-426c-9553-b67f2951825b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1269.624130] env[68437]: DEBUG oslo_vmware.api [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945144, 'name': PowerOnVM_Task, 'duration_secs': 0.517139} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.624407] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1269.624617] env[68437]: INFO nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Took 8.01 seconds to spawn the instance on the hypervisor. 
[ 1269.624791] env[68437]: DEBUG nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1269.625615] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cfb8a6-3bb5-452d-bdf8-9eaa27df561d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.680924] env[68437]: DEBUG nova.network.neutron [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.722830] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20dd015c-2727-4d4b-a849-40ba123afe0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.742226] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e368ced-95c5-44d5-b9c0-b8f526fd3557 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.760050] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1270.143631] env[68437]: INFO nova.compute.manager [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Took 12.77 seconds to build instance. [ 1270.182714] env[68437]: INFO nova.compute.manager [-] [instance: aff861ed-e792-480a-811e-c157c0606d08] Took 1.62 seconds to deallocate network for instance. 
[ 1270.253194] env[68437]: DEBUG nova.compute.manager [req-f28eba27-1f04-4972-b733-1f7ee40a3a40 req-7373bd13-34ac-4768-a141-d32e988bc110 service nova] [instance: aff861ed-e792-480a-811e-c157c0606d08] Received event network-vif-deleted-5f058ce1-be0f-4b97-be84-11302a668781 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1270.645467] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8621f4db-931c-446b-be55-2ed31ad71b11 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.282s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.645869] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.259s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.646132] env[68437]: INFO nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1270.646350] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.689703] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.690034] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.690296] env[68437]: DEBUG nova.objects.instance [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'resources' on Instance uuid aff861ed-e792-480a-811e-c157c0606d08 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.797436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.797671] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.797942] env[68437]: DEBUG nova.compute.manager [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1270.798913] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b652463e-6ed0-46ba-97cb-67fc7b210c81 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.805534] env[68437]: DEBUG nova.compute.manager [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1270.806126] env[68437]: DEBUG nova.objects.instance [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'flavor' on Instance uuid 2ac0c165-a898-4d23-a346-2567921caf1b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1271.314156] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b6a653-c447-4227-9afe-f8fbb34ee81a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.322212] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f615e28-7ffe-4c1a-84c0-ad86b204e1ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.352875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fccd49-342b-4c26-bda5-054bd49cdb71 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.360283] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd0ceb2-bd07-4fe3-81c2-a74599f59947 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.375490] env[68437]: DEBUG nova.compute.provider_tree [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.408594] env[68437]: DEBUG nova.network.neutron [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Port 
505ca14d-2a80-4b29-bd5c-9d991541e9ad binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1271.813966] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1271.814257] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86f0b26b-d817-4a25-8461-afec7cfa73dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.821862] env[68437]: DEBUG oslo_vmware.api [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1271.821862] env[68437]: value = "task-2945146" [ 1271.821862] env[68437]: _type = "Task" [ 1271.821862] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.829879] env[68437]: DEBUG oslo_vmware.api [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.879037] env[68437]: DEBUG nova.scheduler.client.report [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1272.226441] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.229972] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.230185] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.332143] env[68437]: DEBUG oslo_vmware.api [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945146, 'name': PowerOffVM_Task, 'duration_secs': 0.212833} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.332536] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1272.332580] env[68437]: DEBUG nova.compute.manager [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1272.333319] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0781895-1e91-432b-89d4-cb105b092515 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.383790] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.694s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.403387] env[68437]: INFO nova.scheduler.client.report [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted allocations for instance aff861ed-e792-480a-811e-c157c0606d08 [ 1272.436357] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.436731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.437059] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.733367] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.733650] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.733823] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.733976] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1272.734890] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4089cf40-460f-4b7f-a999-666d7cc5ed1c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.743612] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c2f41c-e02a-45b3-9512-f71894a0f98d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.757178] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0d5cd9-d6dd-410f-b2e6-f311fc250462 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.765128] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9e3711-06e9-4782-b4b2-98b9b762cc2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.792917] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179645MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1272.792917] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1272.792917] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.844186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-680b7d51-a3e8-4db8-8124-6576f35aa98c tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.912971] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e5c0600d-6a45-4ce6-92de-88dc7a2492d0 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "aff861ed-e792-480a-811e-c157c0606d08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.968s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.405227] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.405511] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.405696] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.405900] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.406081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.408087] env[68437]: INFO nova.compute.manager [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Terminating instance [ 1273.482663] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.482852] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f 
tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.483075] env[68437]: DEBUG nova.network.neutron [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1273.802999] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Applying migration context for instance 0a246b14-5078-4549-a270-73f99a1647c7 as it has an incoming, in-progress migration 28bc1daf-99ee-4cc6-9ea9-7809d0333e47. Migration status is post-migrating {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1273.804362] env[68437]: INFO nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating resource usage from migration 28bc1daf-99ee-4cc6-9ea9-7809d0333e47 [ 1273.826732] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.826907] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e2143e07-8c8d-4008-bb73-29aae91baee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827056] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 191b441c-2c9f-48f9-b83a-d539722e6375 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827200] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance e81e633d-34a6-443d-a2fe-95e6d8afa552 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827320] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance ede62837-4ff5-44be-a015-9ea06b9126a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827453] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 524c1b76-3563-482d-a676-26fa6c28a3c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827574] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 7705f1c5-3b96-426c-9553-b67f2951825b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.827702] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 2ac0c165-a898-4d23-a346-2567921caf1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.828048] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Migration 28bc1daf-99ee-4cc6-9ea9-7809d0333e47 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1273.828212] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 0a246b14-5078-4549-a270-73f99a1647c7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1273.828423] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1273.828577] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1273.914566] env[68437]: DEBUG nova.compute.manager [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1273.914820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1273.915725] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131095c6-1c29-4911-b123-659725037ac1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.925680] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1273.926748] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c88dbc9e-aaaf-41ca-8799-c1cd115dc98b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.948719] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da306b2c-3683-438e-88cc-7df44ad664a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.955435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfefd5c-08c2-4c04-ba0a-c1cc549fb725 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.988922] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b68dab-f2ab-4551-b260-b8b678311e97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.994360] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1273.994436] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1273.994653] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore1] 2ac0c165-a898-4d23-a346-2567921caf1b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1273.996675] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7504ceb-62c6-42af-9df9-9a0c3679da2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.999246] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d29ec2-c2de-4dfa-8aec-a311381edce5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.016447] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.022166] env[68437]: DEBUG oslo_vmware.api [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1274.022166] env[68437]: value = "task-2945148" [ 1274.022166] env[68437]: _type = "Task" [ 1274.022166] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.032140] env[68437]: DEBUG oslo_vmware.api [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.195070] env[68437]: DEBUG nova.network.neutron [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.524428] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.536362] env[68437]: DEBUG oslo_vmware.api [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185822} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.536619] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1274.536807] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1274.536985] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1274.537184] env[68437]: INFO nova.compute.manager [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1274.537433] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1274.537635] env[68437]: DEBUG nova.compute.manager [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1274.537731] env[68437]: DEBUG nova.network.neutron [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1274.697593] env[68437]: DEBUG oslo_concurrency.lockutils [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.817752] env[68437]: DEBUG nova.compute.manager [req-041b8c3c-9f08-436e-a834-f69906e7f5ba req-f468d04d-ff2c-42dc-80d8-e4e7244d055c service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Received event network-vif-deleted-a944b19d-2c14-4969-9dfb-c1003e5c743b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1274.817961] env[68437]: INFO nova.compute.manager [req-041b8c3c-9f08-436e-a834-f69906e7f5ba req-f468d04d-ff2c-42dc-80d8-e4e7244d055c service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Neutron deleted interface a944b19d-2c14-4969-9dfb-c1003e5c743b; detaching it from the instance and deleting it from the info cache [ 1274.818138] env[68437]: DEBUG nova.network.neutron [req-041b8c3c-9f08-436e-a834-f69906e7f5ba req-f468d04d-ff2c-42dc-80d8-e4e7244d055c service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.956665] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.956921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.031652] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1275.031907] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.239s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.206369] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc063d7-ce83-49c0-b891-29cfc49aba13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.213567] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe2d19e-6ffb-44f7-b011-d3648d8e79f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.300869] env[68437]: DEBUG nova.network.neutron [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.321127] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-849dee7d-95f5-4e8f-a978-c04530d80b28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.330080] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401c0a84-7e9c-4443-aa2c-27bc16a1d606 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.360760] env[68437]: DEBUG nova.compute.manager [req-041b8c3c-9f08-436e-a834-f69906e7f5ba req-f468d04d-ff2c-42dc-80d8-e4e7244d055c service nova] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Detach interface failed, port_id=a944b19d-2c14-4969-9dfb-c1003e5c743b, reason: Instance 2ac0c165-a898-4d23-a346-2567921caf1b could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1275.459546] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1275.803868] env[68437]: INFO nova.compute.manager [-] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Took 1.27 seconds to deallocate network for instance. 
[ 1275.982310] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.982583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.984136] env[68437]: INFO nova.compute.claims [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.305873] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35d258c-1053-40f7-8f95-cfed9310d5cd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.311152] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.327414] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b720dde-8535-4755-b752-e4a197be2be9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.333996] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1276.840411] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1276.840778] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8ae29c6-af01-4e93-9650-657222d5d5fe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.847676] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1276.847676] env[68437]: value = "task-2945149" [ 1276.847676] env[68437]: _type = "Task" [ 1276.847676] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.855159] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.117117] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5cfe48-2edc-4cd6-ae84-01756134bd01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.124796] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b52f97-8ac4-4b33-ab29-45193e79e5c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.154066] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1f5993-59d9-45f5-bed4-b38a43632fdf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.161127] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8842a537-cb25-4d00-894d-d4eafecfd107 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.175234] env[68437]: DEBUG nova.compute.provider_tree [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.357725] env[68437]: DEBUG oslo_vmware.api [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945149, 'name': PowerOnVM_Task, 'duration_secs': 0.383123} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.357930] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1277.358127] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-d6098a79-c1c9-4dcb-8a2a-9b52ffa1560f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance '0a246b14-5078-4549-a270-73f99a1647c7' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.678649] env[68437]: DEBUG nova.scheduler.client.report [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.183827] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.184327] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1278.186989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.876s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.187215] env[68437]: DEBUG nova.objects.instance [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid 2ac0c165-a898-4d23-a346-2567921caf1b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1278.689717] env[68437]: DEBUG nova.compute.utils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1278.693793] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1278.693976] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1278.732655] env[68437]: DEBUG nova.policy [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '44a64439ac8d41239fad856a83a02e1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e28f7fd8c8d412f8c9e1624c55d6604', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1278.833595] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e344fd54-4cec-49dc-9545-d82f09a26b9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.841184] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16224d7e-636e-476f-92dc-b8877bff8737 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.870735] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce46a97f-408c-4f84-b23e-f9930a8afe56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.877438] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3c884c-c0e9-4dd8-9467-b2825aa3e08c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.890202] env[68437]: DEBUG nova.compute.provider_tree [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.023776] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Successfully created port: d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.031792] env[68437]: DEBUG nova.compute.manager [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1279.195100] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1279.394075] env[68437]: DEBUG nova.scheduler.client.report [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1279.553548] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.731819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.732143] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock 
"0a246b14-5078-4549-a270-73f99a1647c7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.732407] env[68437]: DEBUG nova.compute.manager [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Going to confirm migration 7 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1279.898495] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.900977] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.348s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.917328] env[68437]: INFO nova.scheduler.client.report [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance 2ac0c165-a898-4d23-a346-2567921caf1b [ 1280.206068] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1280.229981] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1280.230253] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1280.230411] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1280.230594] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1280.230742] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1280.230891] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1280.231112] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1280.231278] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1280.231453] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1280.231611] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1280.231784] env[68437]: DEBUG nova.virt.hardware [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1280.232661] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adde031c-79d2-47e3-818c-fbd45e72f903 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.242726] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce4d02a-145b-4aec-a762-224a9c452853 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.329099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.329099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquired lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.329099] env[68437]: DEBUG nova.network.neutron [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1280.329291] env[68437]: DEBUG nova.objects.instance [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'info_cache' on Instance uuid 0a246b14-5078-4549-a270-73f99a1647c7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.405580] env[68437]: INFO nova.compute.claims [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1280.421413] env[68437]: DEBUG nova.compute.manager [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 
req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1280.421689] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.421923] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.422079] env[68437]: DEBUG oslo_concurrency.lockutils [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.422268] env[68437]: DEBUG nova.compute.manager [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] No waiting events found dispatching network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1280.422382] env[68437]: WARNING nova.compute.manager [req-e9092a20-0d65-4e0f-8099-c5c98b1479f4 req-e81e96d1-4940-48b1-bcef-354f2030532f service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received unexpected event network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d for instance with vm_state building and task_state spawning. 
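The network-vif-plugged handling just above shows Nova's external-event dispatch: when Neutron reports the port event, the compute manager pops a matching waiter if one was registered for the instance, otherwise it logs the "Received unexpected event" WARNING seen here (the event arrived while the instance was still building/spawning). A conceptual sketch of that dispatch, not Nova's actual implementation; the registry keying and the use of threading.Event are simplifications:

import logging
import threading

LOG = logging.getLogger(__name__)

# Hypothetical registry: (instance_uuid, event_name) -> threading.Event
_waiters = {}

def prepare_for_event(instance_uuid, event_name):
    """Register interest in an external event before triggering it."""
    ev = threading.Event()
    _waiters[(instance_uuid, event_name)] = ev
    return ev

def external_instance_event(instance_uuid, event_name):
    """Dispatch an event reported by Neutron to any waiting caller."""
    ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # No waiter registered: analogous to the WARNING logged above.
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
    else:
        ev.set()  # wake the thread blocked waiting for this event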
[ 1280.424881] env[68437]: DEBUG oslo_concurrency.lockutils [None req-78dce544-ff65-48c7-acc8-e231f8c8f972 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "2ac0c165-a898-4d23-a346-2567921caf1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.019s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.508044] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Successfully updated port: d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.914076] env[68437]: INFO nova.compute.resource_tracker [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating resource usage from migration 169f100e-c1bf-4675-b96e-3bc7b32841d0 [ 1281.011966] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.012028] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.012158] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1281.039160] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550f3417-5ab5-414a-b0b4-2febc5fa88b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.048129] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170b2e87-8167-4bd7-9bef-41129af9562b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.730060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.730415] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.730452] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.730633] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.730801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.736370] env[68437]: INFO nova.compute.manager [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Terminating instance [ 1281.738060] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3156241-b0cb-4ec6-a965-0dd4d0d3341a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.747848] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa3db6c-e656-4434-ba45-93a966d05cfb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.761747] env[68437]: DEBUG nova.compute.provider_tree [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.784993] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1281.982324] env[68437]: DEBUG nova.network.neutron [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.040305] env[68437]: DEBUG nova.network.neutron [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [{"id": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "address": "fa:16:3e:34:9d:f3", "network": {"id": "cf3dc5a5-3281-4466-8645-74e531573ce7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1166667718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40d8becefc85431b9723c72aa09d152b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap505ca14d-2a", "ovs_interfaceid": "505ca14d-2a80-4b29-bd5c-9d991541e9ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.243116] env[68437]: DEBUG nova.compute.manager [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: 
e81e633d-34a6-443d-a2fe-95e6d8afa552] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1282.243116] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1282.243853] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d221869e-5f6b-496d-9f50-73c58f72f8a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.252129] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1282.252344] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71f3b722-49e0-4b2a-9e01-985187baae16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.258651] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1282.258651] env[68437]: value = "task-2945150" [ 1282.258651] env[68437]: _type = "Task" [ 1282.258651] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.266649] env[68437]: DEBUG nova.scheduler.client.report [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.270483] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945150, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.450157] env[68437]: DEBUG nova.compute.manager [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-changed-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1282.450432] env[68437]: DEBUG nova.compute.manager [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing instance network info cache due to event network-changed-d3799e51-78a6-4580-a8ae-68366989843d. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1282.450681] env[68437]: DEBUG oslo_concurrency.lockutils [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.484990] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.485332] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance network_info: |[{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1282.485637] env[68437]: DEBUG oslo_concurrency.lockutils [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.485871] env[68437]: DEBUG nova.network.neutron [req-f2666c7f-3220-4624-8ef3-d514442832aa 
req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing network info cache for port d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1282.487058] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:2c:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3799e51-78a6-4580-a8ae-68366989843d', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1282.496536] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1282.499112] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1282.499542] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90bdc5be-4fab-4206-bedc-3e6ef542c18a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.518753] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1282.518753] env[68437]: value = "task-2945151" [ 1282.518753] env[68437]: _type = "Task" [ 1282.518753] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.526131] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945151, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.542450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Releasing lock "refresh_cache-0a246b14-5078-4549-a270-73f99a1647c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.542754] env[68437]: DEBUG nova.objects.instance [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'migration_context' on Instance uuid 0a246b14-5078-4549-a270-73f99a1647c7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1282.705717] env[68437]: DEBUG nova.network.neutron [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updated VIF entry in instance network info cache for port d3799e51-78a6-4580-a8ae-68366989843d. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1282.706170] env[68437]: DEBUG nova.network.neutron [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.767853] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945150, 'name': PowerOffVM_Task, 'duration_secs': 0.183408} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.768233] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1282.768233] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1282.768446] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a96e891-0c7b-4bc8-ae68-27f967bb0cf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.771637] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.871s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.771807] env[68437]: INFO nova.compute.manager [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Migrating [ 1282.832902] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1282.833126] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1282.833308] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleting the datastore file [datastore2] e81e633d-34a6-443d-a2fe-95e6d8afa552 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.833623] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94a05f47-6cca-4428-94d4-f715499ac14a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.839564] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for the task: (returnval){ [ 1282.839564] env[68437]: value = "task-2945153" [ 1282.839564] env[68437]: _type = "Task" [ 1282.839564] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.846672] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945153, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.028057] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945151, 'name': CreateVM_Task, 'duration_secs': 0.308225} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.028186] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1283.035757] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.035940] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.036304] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1283.036562] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55a65f61-26b7-4b84-89de-5dd7332d2e21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.041145] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1283.041145] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522e6a8c-64a7-d91f-8fe4-9963a10d512d" [ 1283.041145] env[68437]: _type = "Task" [ 1283.041145] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.045265] env[68437]: DEBUG nova.objects.base [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Object Instance<0a246b14-5078-4549-a270-73f99a1647c7> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1283.046025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0ae65d-a4f8-40b1-aebd-ff19714afddd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.053958] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]522e6a8c-64a7-d91f-8fe4-9963a10d512d, 'name': SearchDatastore_Task, 'duration_secs': 0.009734} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.066451] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.066692] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1283.066925] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.067081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.067262] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1283.068408] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c43fabb-0bac-444d-bfb4-7822bbf5a8fa {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.070449] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-479a79d2-c486-45f6-ae76-f3390016a5f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.075183] env[68437]: DEBUG oslo_vmware.api [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1283.075183] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac0181-d56a-1c8b-1f0f-48fe9aa15144" [ 1283.075183] env[68437]: _type = "Task" [ 1283.075183] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.080334] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1283.080503] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1283.081387] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ed2f7a-dfee-4f71-b13b-5bb28141b588 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.086101] env[68437]: DEBUG oslo_vmware.api [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ac0181-d56a-1c8b-1f0f-48fe9aa15144, 'name': SearchDatastore_Task, 'duration_secs': 0.005976} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.086658] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.086919] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.089088] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1283.089088] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e5da-7675-4b56-87f3-797d808dea66" [ 1283.089088] env[68437]: _type = "Task" [ 1283.089088] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.096434] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e5da-7675-4b56-87f3-797d808dea66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.208703] env[68437]: DEBUG oslo_concurrency.lockutils [req-f2666c7f-3220-4624-8ef3-d514442832aa req-613947d3-2f6b-4e61-99b3-f369ccb1c27a service nova] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.286892] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.286892] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.286892] env[68437]: DEBUG nova.network.neutron [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1283.350301] env[68437]: DEBUG oslo_vmware.api [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Task: {'id': task-2945153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132268} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.350552] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.350735] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1283.350916] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1283.351105] env[68437]: INFO nova.compute.manager [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Took 1.11 seconds to destroy the instance on the hypervisor. 
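The SearchDatastore_Task, DeleteDatastoreFile_Task and similar entries above all follow the same cycle: oslo.vmware submits a vCenter task, then _poll_task re-reads its state, logging "progress is N%" until it reports "completed successfully" with a duration. A minimal illustrative sketch of that polling loop follows; it is not the actual oslo.vmware implementation, and get_task_info / TaskFailed are hypothetical stand-ins.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_vcenter_task(session, task_ref, interval=0.5, timeout=300):
        # Poll a vCenter task until it finishes (simplified illustration of the
        # wait_for_task / _poll_task cycle seen in the log above).
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = session.get_task_info(task_ref)   # hypothetical helper: re-read task state
            if info.state == 'success':
                return info.result                   # e.g. the SearchDatastore result
            if info.state == 'error':
                raise TaskFailed(str(info.error))
            # still queued/running: log progress and poll again
            print(f"Task {task_ref}: {info.progress or 0}% complete")
            time.sleep(interval)
        raise TaskFailed(f"timed out after {timeout}s waiting for {task_ref}")
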
[ 1283.351349] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1283.351683] env[68437]: DEBUG nova.compute.manager [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1283.351790] env[68437]: DEBUG nova.network.neutron [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1283.604824] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52a1e5da-7675-4b56-87f3-797d808dea66, 'name': SearchDatastore_Task, 'duration_secs': 0.007955} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.605676] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e6d439c-6ce3-44a7-b301-67458da6d65d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.610874] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1283.610874] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]527cdc13-f52f-6ed7-0e34-b900e53068a4" [ 1283.610874] env[68437]: _type = "Task" [ 1283.610874] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.623242] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]527cdc13-f52f-6ed7-0e34-b900e53068a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009593} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.623490] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.623770] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1283.624033] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a5753d4-9f97-401a-bd0c-008fbc07442f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.632065] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1283.632065] env[68437]: value = "task-2945154" [ 1283.632065] env[68437]: _type = "Task" [ 1283.632065] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.639817] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945154, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.737301] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca043631-5efb-48ba-9ca1-5ac65d0ccf68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.744230] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b19c2d-7d04-4bcc-aba7-3faf0160186c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.775435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cfde71-19f1-401d-8498-99a65adb3649 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.782807] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37432e9-0f96-4d08-842b-4d70eda8b9a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.798446] env[68437]: DEBUG nova.compute.provider_tree [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.040642] env[68437]: DEBUG nova.network.neutron [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1284.073353] env[68437]: DEBUG nova.network.neutron [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.142435] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945154, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428249} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.142613] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1284.142833] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1284.143086] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1b96b16-3bd3-4f49-8e37-9cb964fcad49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.149177] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1284.149177] env[68437]: value = "task-2945155" [ 1284.149177] env[68437]: _type = "Task" [ 1284.149177] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.156928] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945155, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.319306] env[68437]: ERROR nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [req-ca808f82-e15b-4780-ab96-a481e631c55a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 422e986f-b38b-46ad-94b3-91f3ccd10a05. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ca808f82-e15b-4780-ab96-a481e631c55a"}]} [ 1284.337021] env[68437]: DEBUG nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1284.349824] env[68437]: DEBUG nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1284.350058] env[68437]: DEBUG nova.compute.provider_tree [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.362295] env[68437]: DEBUG nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1284.378467] env[68437]: DEBUG nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1284.477974] env[68437]: DEBUG nova.compute.manager [req-83413f71-88d7-4109-b567-6ea72a42eb4d req-c98d42c2-16c8-4298-9776-6da6afe900df service nova] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Received event network-vif-deleted-a8b8a7ab-ddfb-4e13-a1aa-9d38ca91fc8b {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1284.515189] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aaf12d-128f-49ea-9d17-4c41249adecb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.522800] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6580403-27b6-4613-9924-5ccda86d901f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.552781] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.556067] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde30be2-8368-498e-8989-744acc38d05f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.563674] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-642a401f-7334-44c8-812d-d3b22869d12d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.578399] env[68437]: INFO nova.compute.manager [-] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Took 1.23 seconds to deallocate network for instance. [ 1284.579379] env[68437]: DEBUG nova.compute.provider_tree [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1284.658971] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945155, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070301} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.659265] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1284.660038] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad64462-d7e6-4b83-a41e-7c706147e097 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.681211] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1284.681835] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09bd7e6d-faa4-4a3b-8aad-6f91f565c5e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.700887] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1284.700887] env[68437]: value = "task-2945156" [ 1284.700887] env[68437]: _type = "Task" [ 1284.700887] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.708303] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945156, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.086697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.110274] env[68437]: DEBUG nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updated inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with generation 156 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1285.110548] env[68437]: DEBUG nova.compute.provider_tree [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 generation from 156 to 157 during operation: update_inventory {{(pid=68437) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1285.110731] env[68437]: DEBUG nova.compute.provider_tree [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.211109] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945156, 'name': ReconfigVM_Task, 'duration_secs': 0.305548} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.211366] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1285.211972] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70eab001-7972-4a7b-95b0-e8a3ec3d9aff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.218038] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1285.218038] env[68437]: value = "task-2945157" [ 1285.218038] env[68437]: _type = "Task" [ 1285.218038] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.225588] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945157, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.729198] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945157, 'name': Rename_Task, 'duration_secs': 0.140335} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.729478] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1285.729723] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f1f7bac-9377-4d49-8a77-d6ad31844b77 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.735692] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1285.735692] env[68437]: value = "task-2945158" [ 1285.735692] env[68437]: _type = "Task" [ 1285.735692] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.742814] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945158, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.070118] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7054fe-98ac-4e28-ab2f-3be2cca4f8a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.088558] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1286.120571] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.033s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.123903] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.037s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.123903] env[68437]: DEBUG nova.objects.instance [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lazy-loading 'resources' on Instance uuid e81e633d-34a6-443d-a2fe-95e6d8afa552 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1286.249068] env[68437]: DEBUG oslo_vmware.api [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945158, 'name': PowerOnVM_Task, 'duration_secs': 0.44485} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.249068] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.249068] env[68437]: INFO nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Took 6.04 seconds to spawn the instance on the hypervisor. 
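The earlier ERROR ("Got 409 ... placement.concurrent_update") followed by the inventory refresh and the provider generation moving from 156 to 157 shows Placement's compare-and-swap behaviour: every inventory PUT carries the provider generation the caller last saw, and a stale generation is rejected, so the caller re-reads and retries. A rough sketch of that retry loop, assuming an already-authenticated requests session pointed at the Placement endpoint; the payload shape mirrors the inventory dicts logged above, and this is not Nova's actual report client.

    import requests

    def set_inventory(sess: requests.Session, placement_url: str,
                      rp_uuid: str, inventories: dict, max_retries: int = 4):
        # Write provider inventory, re-reading the generation on each
        # placement.concurrent_update conflict (illustrative sketch only).
        url = f"{placement_url}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_retries):
            current = sess.get(url).json()          # pick up the latest provider generation
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,         # e.g. {'VCPU': {...}, 'MEMORY_MB': {...}, 'DISK_GB': {...}}
            }
            resp = sess.put(url, json=payload)
            if resp.status_code != 409:             # 409 = someone else bumped the generation
                resp.raise_for_status()
                return resp.json()
        raise RuntimeError(f"could not update inventory for provider {rp_uuid}")
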
[ 1286.249068] env[68437]: DEBUG nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.249068] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641ebb17-4549-4c88-afee-d3c2745af130 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.594780] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.595125] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e6722b9-2318-4085-8d00-a8c52fc96d20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.602831] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1286.602831] env[68437]: value = "task-2945159" [ 1286.602831] env[68437]: _type = "Task" [ 1286.602831] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.610790] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.677607] env[68437]: INFO nova.scheduler.client.report [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocation for migration 28bc1daf-99ee-4cc6-9ea9-7809d0333e47 [ 1286.767591] env[68437]: INFO nova.compute.manager [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Took 10.80 seconds to build instance. 
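The 10.80-second build of instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 reported above strings together the steps visible in the earlier entries: reuse the cached image VMDK under devstack-image-cache_base, copy it into the instance directory, extend the root disk to the flavor size, attach it with a ReconfigVM_Task, rename the VM and power it on. The condensed sketch below only mirrors that sequence; `vmops` and every method on it are hypothetical placeholders, not real Nova signatures.

    def spawn_from_cached_image(vmops, instance, image_id, datastore):
        # Condensed spawn flow as recorded in the log; `vmops` is a hypothetical
        # facade whose methods stand in for the corresponding vmops/vm_util steps.
        cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        root_vmdk = f"[{datastore}] {instance.uuid}/{instance.uuid}.vmdk"

        vmops.fetch_image_if_missing(image_id, cache_vmdk)    # SearchDatastore_Task (+ download on a cache miss)
        vmops.copy_virtual_disk(cache_vmdk, root_vmdk)        # CopyVirtualDisk_Task
        vmops.extend_virtual_disk(root_vmdk,
                                  instance.root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task, size in KB (1048576 above)
        vmops.attach_disk_to_vm(instance, root_vmdk)          # ReconfigVM_Task
        vmops.rename_vm(instance)                             # Rename_Task
        vmops.power_on_instance(instance)                     # PowerOnVM_Task
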
[ 1286.774658] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47824d40-30bb-4b6b-925d-a517713d065b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.782693] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cedbec-9c03-4816-b3f2-60562192c4b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.815126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e16521-29e0-43b8-898a-bdb7629bb6b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.821641] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32929b97-2aed-47f2-9c95-d90cefb5bd68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.835940] env[68437]: DEBUG nova.compute.provider_tree [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.025697] env[68437]: DEBUG nova.compute.manager [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-changed-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1287.025973] env[68437]: DEBUG nova.compute.manager [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing instance network info cache due to event network-changed-d3799e51-78a6-4580-a8ae-68366989843d. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1287.026217] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.026362] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.026524] env[68437]: DEBUG nova.network.neutron [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing network info cache for port d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1287.112862] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945159, 'name': PowerOffVM_Task, 'duration_secs': 0.239967} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.113159] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.113352] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1287.187819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-448ce761-7bea-487e-ae64-43f94024c0ac tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.456s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.270221] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bceb8161-c48e-4d18-93a2-09e32ced473b tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.313s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.339032] env[68437]: DEBUG nova.scheduler.client.report [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1287.377846] env[68437]: INFO nova.compute.manager [None req-c0bacfd7-d783-44b0-b2c0-8b204e30ee21 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Get console output [ 1287.378090] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-c0bacfd7-d783-44b0-b2c0-8b204e30ee21 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] The console log is missing. 
Check your VSPC configuration [ 1287.619537] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1287.619867] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1287.619926] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1287.620120] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1287.620276] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1287.620424] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1287.620632] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1287.620793] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1287.620960] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1287.621138] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1287.621317] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1287.628768] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44d0d237-01e7-48c4-934a-e236e3729b0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.645849] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1287.645849] env[68437]: value = "task-2945160" [ 1287.645849] env[68437]: _type = "Task" [ 1287.645849] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.654043] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.746158] env[68437]: DEBUG nova.network.neutron [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updated VIF entry in instance network info cache for port d3799e51-78a6-4580-a8ae-68366989843d. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1287.746537] env[68437]: DEBUG nova.network.neutron [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.845073] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.722s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.867808] env[68437]: INFO nova.scheduler.client.report [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Deleted allocations for instance e81e633d-34a6-443d-a2fe-95e6d8afa552 [ 1288.156325] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945160, 'name': ReconfigVM_Task, 'duration_secs': 0.274907} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.156684] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1288.249878] env[68437]: DEBUG oslo_concurrency.lockutils [req-cdc0d36c-f5f3-4ed9-85e4-37abaafc6800 req-4d618497-838d-45f2-8fe6-c29e42865f89 service nova] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.375212] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bf71e70c-6fd0-4852-9800-6b7e7a043f47 tempest-ServersTestJSON-973830923 tempest-ServersTestJSON-973830923-project-member] Lock "e81e633d-34a6-443d-a2fe-95e6d8afa552" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.645s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.663370] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1288.663684] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1288.663859] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1288.664087] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1288.664265] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1288.664429] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 
tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1288.664722] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1288.664993] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1288.665248] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1288.665508] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1288.665739] env[68437]: DEBUG nova.virt.hardware [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1288.671549] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1288.671873] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ee9a53d-b184-4037-9950-32398cdff0b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.692165] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1288.692165] env[68437]: value = "task-2945161" [ 1288.692165] env[68437]: _type = "Task" [ 1288.692165] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.700446] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945161, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.201731] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945161, 'name': ReconfigVM_Task, 'duration_secs': 0.187709} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.202055] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1289.202881] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08877f4f-6f0e-4a31-9025-60f51e0ebe16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.226962] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.226962] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95d930c1-6b24-4b34-acd9-95d6598a6319 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.244201] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1289.244201] env[68437]: value = "task-2945162" [ 1289.244201] env[68437]: _type = "Task" [ 1289.244201] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.251711] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945162, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.754363] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.253937] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945162, 'name': ReconfigVM_Task, 'duration_secs': 0.731649} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.254311] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1290.254583] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1290.761401] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203322ff-b952-45b0-a343-d74ce6fd9a74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.780421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d6eb37-131c-4389-b8a4-6040adb02f55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.798439] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1291.337216] env[68437]: DEBUG nova.network.neutron [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Port f6919baa-a381-4bb9-bb35-d535d859a1e4 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1292.359146] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.359587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.359587] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.392793] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.393050] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.393193] env[68437]: DEBUG nova.network.neutron [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1294.090306] env[68437]: DEBUG nova.network.neutron [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.593111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1295.117997] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bee9d7c-df1f-4eb9-adfd-a0449dd0e245 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.138200] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94696984-d3af-4aba-9c5a-1b8bc0736635 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.145036] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1295.650707] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1295.651053] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-224e09d1-d84e-42fa-955e-4279040c3de9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.657947] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1295.657947] env[68437]: value = "task-2945163" [ 1295.657947] env[68437]: _type = "Task" [ 1295.657947] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.665579] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.168283] env[68437]: DEBUG oslo_vmware.api [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945163, 'name': PowerOnVM_Task, 'duration_secs': 0.360514} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.168513] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.168700] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ad5aa2ac-a123-4bc5-a7c8-878d07e21902 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance '524c1b76-3563-482d-a676-26fa6c28a3c7' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1298.639990] env[68437]: DEBUG nova.network.neutron [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Port f6919baa-a381-4bb9-bb35-d535d859a1e4 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1298.640290] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.640439] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.640605] env[68437]: DEBUG nova.network.neutron [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1299.331861] env[68437]: DEBUG nova.network.neutron [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.834397] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.339468] env[68437]: DEBUG nova.compute.manager [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68437) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1300.339715] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.339977] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.843086] env[68437]: DEBUG nova.objects.instance [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'migration_context' on Instance uuid 524c1b76-3563-482d-a676-26fa6c28a3c7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1301.465110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82092566-a6dd-4316-8254-d31197358ab3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.472380] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c51b282-3180-4678-ae79-4c6c689ea066 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.503861] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6ae900-ddc0-4e5e-afcf-7bb3a57c33a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.510649] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b796dcbe-88ec-4c71-bb2c-695fe59479f5 
{{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.522970] env[68437]: DEBUG nova.compute.provider_tree [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1302.026015] env[68437]: DEBUG nova.scheduler.client.report [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.036591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.696s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.572023] env[68437]: INFO nova.compute.manager [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Swapping old allocation on dict_keys(['422e986f-b38b-46ad-94b3-91f3ccd10a05']) held by migration 169f100e-c1bf-4675-b96e-3bc7b32841d0 for instance [ 1304.594338] env[68437]: DEBUG nova.scheduler.client.report [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Overwriting current allocation {'allocations': {'422e986f-b38b-46ad-94b3-91f3ccd10a05': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 158}}, 'project_id': 'f6f6382f6c6843529a37d7c62837523a', 'user_id': '0e4b1b3012874778bc147c3e7b00133c', 'consumer_generation': 1} on consumer 524c1b76-3563-482d-a676-26fa6c28a3c7 {{(pid=68437) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1304.671271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.671478] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.671658] env[68437]: DEBUG nova.network.neutron [None 
req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1305.191886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.192147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.368853] env[68437]: DEBUG nova.network.neutron [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [{"id": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "address": "fa:16:3e:51:30:91", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6919baa-a3", "ovs_interfaceid": "f6919baa-a381-4bb9-bb35-d535d859a1e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.695286] env[68437]: DEBUG nova.compute.utils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1305.871267] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-524c1b76-3563-482d-a676-26fa6c28a3c7" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.871725] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1305.872036] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0f2501a-1cbb-4325-931d-533bfd99d5e4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.880431] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1305.880431] env[68437]: value = "task-2945164" [ 1305.880431] env[68437]: _type = "Task" [ 1305.880431] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.888020] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945164, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.154140] env[68437]: INFO nova.compute.manager [None req-240111c0-6c24-4f40-b01f-f20ad04c3206 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Get console output [ 1306.154377] env[68437]: WARNING nova.virt.vmwareapi.driver [None req-240111c0-6c24-4f40-b01f-f20ad04c3206 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] The console log is missing. Check your VSPC configuration [ 1306.197969] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.392017] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945164, 'name': PowerOffVM_Task, 'duration_secs': 0.192769} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.392294] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1306.392923] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1306.393167] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.393323] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1306.393506] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.393653] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1306.393801] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1306.394020] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1306.394186] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1306.394352] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1306.394513] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1306.394682] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1306.399405] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2d893f0-1e60-471b-a09e-c0eb9035ff15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.415109] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1306.415109] env[68437]: value = "task-2945165" [ 1306.415109] env[68437]: _type = "Task" [ 1306.415109] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.424650] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945165, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.924812] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945165, 'name': ReconfigVM_Task, 'duration_secs': 0.139024} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.925814] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb69c7e-5600-495d-b505-d6b5f31ecc53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.943119] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1306.943376] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.943538] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1306.943721] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.943893] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1306.944065] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1306.944277] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1306.944438] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1306.944602] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1306.944764] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1306.944937] env[68437]: DEBUG nova.virt.hardware [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1306.945796] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b35260e-30db-4b7f-a9eb-4b61aeb85bb8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.951321] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1306.951321] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52acf25f-b63c-e9ea-9068-d7ec3f8592cb" [ 1306.951321] env[68437]: _type = "Task" [ 1306.951321] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.958696] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52acf25f-b63c-e9ea-9068-d7ec3f8592cb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.235716] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "191b441c-2c9f-48f9-b83a-d539722e6375" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.235990] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.236301] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.236616] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.236905] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.239267] env[68437]: INFO nova.compute.manager [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Terminating instance [ 1307.259181] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.259431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.259652] env[68437]: INFO nova.compute.manager [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Attaching volume 06ba57fb-3c82-4175-b717-ece486ba640e to /dev/sdb [ 1307.295797] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d16ca77-01c8-4540-ab5c-5cad41177f22 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.304689] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c500c652-10f9-4c10-97ef-f5063ebc2007 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.320088] env[68437]: DEBUG nova.virt.block_device [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating existing volume attachment record: 2efaa1d1-a72a-480f-a815-0bb0c26dbac0 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1307.460963] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52acf25f-b63c-e9ea-9068-d7ec3f8592cb, 'name': SearchDatastore_Task, 'duration_secs': 0.007767} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.466591] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1307.466864] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af0588b0-d5d7-487a-8257-1317518f3a47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.484255] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1307.484255] env[68437]: value = "task-2945167" [ 1307.484255] env[68437]: _type = "Task" [ 1307.484255] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.493228] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945167, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.743170] env[68437]: DEBUG nova.compute.manager [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1307.743412] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1307.744335] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff34c312-cbf8-44b6-8589-4df01d4c01b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.752447] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1307.752673] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-855abf8b-471d-4948-80a5-2b7255b88c68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.759338] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1307.759338] env[68437]: value = "task-2945168" [ 1307.759338] env[68437]: _type = "Task" [ 1307.759338] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.768970] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945168, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.994099] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945167, 'name': ReconfigVM_Task, 'duration_secs': 0.191578} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.994099] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1307.994804] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9071a7d6-fab1-44b9-9cad-c9d592f8d2c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.015888] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.016140] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a14d9d3-9bc2-49d9-8eb3-174e0e5f1807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.033642] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1308.033642] env[68437]: value = "task-2945169" [ 1308.033642] env[68437]: _type = "Task" [ 1308.033642] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.041301] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945169, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.269692] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945168, 'name': PowerOffVM_Task, 'duration_secs': 0.167605} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.269958] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1308.270119] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1308.270379] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61026824-5e13-4c4d-9121-6419829d7eb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.339154] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1308.339448] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1308.339565] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleting the datastore file [datastore2] 191b441c-2c9f-48f9-b83a-d539722e6375 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1308.339818] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c55d04eb-a45b-402e-91f4-70c85d43cb9d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.346808] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1308.346808] env[68437]: value = "task-2945171" [ 1308.346808] env[68437]: _type = "Task" [ 1308.346808] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.353734] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945171, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.543418] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945169, 'name': ReconfigVM_Task, 'duration_secs': 0.303326} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.543672] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7/524c1b76-3563-482d-a676-26fa6c28a3c7.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.544522] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05790030-15d6-41f0-b08f-89f29cec096a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.561771] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b43f52c-fbac-4f39-9cda-a7a263a58217 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.578756] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f93590-07d9-4e63-9f42-05bfa54ca327 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.597170] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53fbdee-97c7-4628-b177-4f33694a7e8a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.603886] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1308.604126] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30f8b058-0931-48ef-9195-e46d77ce39f1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.610322] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1308.610322] env[68437]: value = "task-2945172" [ 1308.610322] env[68437]: _type = "Task" [ 1308.610322] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.618916] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945172, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.857308] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945171, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.119808] env[68437]: DEBUG oslo_vmware.api [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945172, 'name': PowerOnVM_Task, 'duration_secs': 0.489692} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.120534] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1309.357724] env[68437]: DEBUG oslo_vmware.api [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.514395} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.359061] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1309.359061] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1309.359061] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1309.359061] env[68437]: INFO nova.compute.manager [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1309.359061] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1309.359319] env[68437]: DEBUG nova.compute.manager [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1309.359319] env[68437]: DEBUG nova.network.neutron [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1309.593630] env[68437]: DEBUG nova.compute.manager [req-93dd9e82-ae0d-4f21-895c-3c32efab3b8d req-7f956190-883a-446b-a255-963b78bb07d0 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Received event network-vif-deleted-94e0ca43-56a9-44cc-b9f1-0fa484701ba2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1309.593983] env[68437]: INFO nova.compute.manager [req-93dd9e82-ae0d-4f21-895c-3c32efab3b8d req-7f956190-883a-446b-a255-963b78bb07d0 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Neutron deleted interface 94e0ca43-56a9-44cc-b9f1-0fa484701ba2; detaching it from the instance and deleting it from the info cache [ 1309.594249] env[68437]: DEBUG nova.network.neutron [req-93dd9e82-ae0d-4f21-895c-3c32efab3b8d req-7f956190-883a-446b-a255-963b78bb07d0 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.076468] env[68437]: DEBUG nova.network.neutron [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.097190] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f48e3da-d47a-4040-bef3-3aad8f84de3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.107168] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc261687-ca34-49ee-897f-dd215fe84ff6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.138513] env[68437]: INFO nova.compute.manager [None req-c26f3226-6f95-46f3-b6bc-2c79abb73342 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance to original state: 'active' [ 1310.141420] env[68437]: DEBUG nova.compute.manager [req-93dd9e82-ae0d-4f21-895c-3c32efab3b8d req-7f956190-883a-446b-a255-963b78bb07d0 service nova] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Detach interface failed, port_id=94e0ca43-56a9-44cc-b9f1-0fa484701ba2, reason: Instance 191b441c-2c9f-48f9-b83a-d539722e6375 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1310.579885] env[68437]: INFO nova.compute.manager [-] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Took 1.22 seconds to deallocate network for instance. 
[ 1310.993233] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.993576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.993816] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1310.994070] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.994257] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.996394] env[68437]: INFO nova.compute.manager [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Terminating instance [ 1311.085353] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1311.085641] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1311.085866] env[68437]: DEBUG nova.objects.instance [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea 
tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'resources' on Instance uuid 191b441c-2c9f-48f9-b83a-d539722e6375 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.500379] env[68437]: DEBUG nova.compute.manager [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1311.500849] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1311.501976] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2b8d94-1215-4870-a0d3-595502b725bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.510325] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1311.510502] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c98f186-9316-4698-b55f-652a9d444790 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.516131] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1311.516131] env[68437]: value = "task-2945174" [ 1311.516131] env[68437]: _type = "Task" [ 1311.516131] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.524624] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945174, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.705814] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eebfa812-e78c-46ff-a057-4f29dd0b4827 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.713542] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239801d4-3549-437e-9089-dace3af5c474 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.743808] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae167ddb-59db-49eb-9cc8-eb913b015935 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.750611] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dae34f0-3105-445a-bf79-2a59eb86fe33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.763205] env[68437]: DEBUG nova.compute.provider_tree [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.862437] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1311.862744] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591154', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'name': 'volume-06ba57fb-3c82-4175-b717-ece486ba640e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7705f1c5-3b96-426c-9553-b67f2951825b', 'attached_at': '', 'detached_at': '', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'serial': '06ba57fb-3c82-4175-b717-ece486ba640e'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1311.863539] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd911b02-a1c2-4db2-8863-2abc60d2edfa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.879825] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d6f811-860d-4575-a64d-35945bc2d80b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.903597] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-06ba57fb-3c82-4175-b717-ece486ba640e/volume-06ba57fb-3c82-4175-b717-ece486ba640e.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1311.904165] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ce51d79-da0e-40a0-88b0-6e043461158e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.922219] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1311.922219] env[68437]: value = "task-2945175" [ 1311.922219] env[68437]: _type = "Task" [ 1311.922219] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.929995] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.026200] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945174, 'name': PowerOffVM_Task, 'duration_secs': 0.188843} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.026424] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1312.026594] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1312.026838] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d697927-9bb6-4d66-997f-653568d3db86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.097337] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1312.097550] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1312.097738] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleting the datastore file [datastore1] 524c1b76-3563-482d-a676-26fa6c28a3c7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1312.098021] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3a12af0-6404-42a2-a78a-e64d8cf650e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.104529] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1312.104529] env[68437]: value = "task-2945177" [ 1312.104529] env[68437]: _type = "Task" [ 1312.104529] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.111968] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945177, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.266517] env[68437]: DEBUG nova.scheduler.client.report [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.431885] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945175, 'name': ReconfigVM_Task, 'duration_secs': 0.340634} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.432277] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-06ba57fb-3c82-4175-b717-ece486ba640e/volume-06ba57fb-3c82-4175-b717-ece486ba640e.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1312.436856] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d22ebd96-7b7b-4d64-b5d0-76978afd07ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.450579] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1312.450579] env[68437]: value = "task-2945178" [ 1312.450579] env[68437]: _type = "Task" [ 1312.450579] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.459669] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945178, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.613533] env[68437]: DEBUG oslo_vmware.api [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249825} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.613883] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1312.614018] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1312.614253] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1312.614433] env[68437]: INFO nova.compute.manager [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1312.614669] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1312.614860] env[68437]: DEBUG nova.compute.manager [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1312.614950] env[68437]: DEBUG nova.network.neutron [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1312.771200] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.790484] env[68437]: INFO nova.scheduler.client.report [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted allocations for instance 191b441c-2c9f-48f9-b83a-d539722e6375 [ 1312.961988] env[68437]: DEBUG oslo_vmware.api [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945178, 'name': ReconfigVM_Task, 'duration_secs': 0.146838} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.962375] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591154', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'name': 'volume-06ba57fb-3c82-4175-b717-ece486ba640e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7705f1c5-3b96-426c-9553-b67f2951825b', 'attached_at': '', 'detached_at': '', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'serial': '06ba57fb-3c82-4175-b717-ece486ba640e'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1313.081269] env[68437]: DEBUG nova.compute.manager [req-4881c9a7-dc4a-4790-a08f-29869f65c236 req-4db24ddb-2650-4da2-a023-6d34f9a3b696 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Received event network-vif-deleted-f6919baa-a381-4bb9-bb35-d535d859a1e4 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1313.081465] env[68437]: INFO nova.compute.manager [req-4881c9a7-dc4a-4790-a08f-29869f65c236 req-4db24ddb-2650-4da2-a023-6d34f9a3b696 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Neutron deleted interface f6919baa-a381-4bb9-bb35-d535d859a1e4; detaching it from the instance and deleting it from the info cache [ 1313.081642] env[68437]: DEBUG nova.network.neutron [req-4881c9a7-dc4a-4790-a08f-29869f65c236 req-4db24ddb-2650-4da2-a023-6d34f9a3b696 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.302989] env[68437]: DEBUG oslo_concurrency.lockutils [None req-02c5abe5-72f2-4044-9a8b-cf759338bdea tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "191b441c-2c9f-48f9-b83a-d539722e6375" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.067s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.561227] env[68437]: DEBUG nova.network.neutron [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.584855] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e90649b-5a66-48ad-9184-a1149346f686 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.595498] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d2f693-6af7-4a73-a584-7a05b9b284d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.628070] env[68437]: DEBUG nova.compute.manager [req-4881c9a7-dc4a-4790-a08f-29869f65c236 req-4db24ddb-2650-4da2-a023-6d34f9a3b696 service nova] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Detach interface failed, port_id=f6919baa-a381-4bb9-bb35-d535d859a1e4, reason: 
Instance 524c1b76-3563-482d-a676-26fa6c28a3c7 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1313.898316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "e2143e07-8c8d-4008-bb73-29aae91baee7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.898796] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.899193] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.899555] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.899951] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "e2143e07-8c8d-4008-bb73-29aae91baee7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.902758] env[68437]: INFO nova.compute.manager [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Terminating instance [ 1314.003843] env[68437]: DEBUG nova.objects.instance [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid 7705f1c5-3b96-426c-9553-b67f2951825b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.065835] env[68437]: INFO nova.compute.manager [-] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Took 1.45 seconds to deallocate network for instance. 
[ 1314.264605] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.265262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.265262] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "0a246b14-5078-4549-a270-73f99a1647c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.265418] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.265497] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.267794] env[68437]: INFO nova.compute.manager [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Terminating instance [ 1314.411543] env[68437]: DEBUG nova.compute.manager [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1314.411800] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1314.413118] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f49c94-47c7-4275-b977-f21b4919eb8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.422097] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.422364] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3801277a-a03d-423b-be8a-0be1d4d0b54a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.428743] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1314.428743] env[68437]: value = "task-2945179" [ 1314.428743] env[68437]: _type = "Task" [ 1314.428743] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.436623] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945179, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.508859] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c4f346f1-0223-421b-bfaf-cac450785f31 tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.249s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.573484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.573883] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.574153] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.604752] env[68437]: INFO nova.scheduler.client.report [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted allocations for instance 524c1b76-3563-482d-a676-26fa6c28a3c7 [ 1314.711073] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.711436] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.772232] env[68437]: DEBUG nova.compute.manager [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1314.772474] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.772677] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae0d425c-042e-4953-a23a-f4abad281937 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.779828] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1314.779828] env[68437]: value = "task-2945180" [ 1314.779828] env[68437]: _type = "Task" [ 1314.779828] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.789104] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.938026] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945179, 'name': PowerOffVM_Task, 'duration_secs': 0.186254} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.938341] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1314.938537] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1314.938803] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23898ffc-889d-4624-84f5-5a5cf85e600c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.000058] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1315.000296] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1315.000494] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleting the datastore file [datastore1] e2143e07-8c8d-4008-bb73-29aae91baee7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1315.000749] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10404068-99e7-4674-8eb4-696576163c8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.006766] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for the task: (returnval){ [ 1315.006766] env[68437]: value = "task-2945182" [ 1315.006766] env[68437]: _type = "Task" [ 1315.006766] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.014863] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.114361] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d3010b9-c9c6-4b39-bc15-4b393c6d70a6 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "524c1b76-3563-482d-a676-26fa6c28a3c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.121s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.214691] env[68437]: INFO nova.compute.manager [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Detaching volume 06ba57fb-3c82-4175-b717-ece486ba640e [ 1315.247015] env[68437]: INFO nova.virt.block_device [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Attempting to driver detach volume 06ba57fb-3c82-4175-b717-ece486ba640e from mountpoint /dev/sdb [ 1315.247280] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1315.247480] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591154', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'name': 'volume-06ba57fb-3c82-4175-b717-ece486ba640e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7705f1c5-3b96-426c-9553-b67f2951825b', 'attached_at': '', 'detached_at': '', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'serial': '06ba57fb-3c82-4175-b717-ece486ba640e'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1315.248390] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af96b2aa-fded-424f-9208-4f7de67ccd90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.269954] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21ec38a-017e-42be-8c3e-9caab0b7681f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.276533] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18ce6a0-b065-429f-a021-4ac9c6422397 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.299893] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74a4ae7-5cab-406a-b160-e80b852e3816 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.305221] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945180, 'name': PowerOffVM_Task, 'duration_secs': 0.184609} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.305816] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1315.306033] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1315.306231] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591146', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'name': 'volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '0a246b14-5078-4549-a270-73f99a1647c7', 'attached_at': '2025-03-11T18:48:07.000000', 'detached_at': '', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'serial': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1315.306935] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3920edf1-ca3b-424b-b8d9-411ad120993f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.319128] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] The volume has not been displaced from its original location: [datastore2] volume-06ba57fb-3c82-4175-b717-ece486ba640e/volume-06ba57fb-3c82-4175-b717-ece486ba640e.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1315.324277] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1315.324854] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ca5baa3-4419-4cc3-a470-1791e034890c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.351700] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d1a28a-8720-4c5a-966d-953a5c9fe205 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.355186] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1315.355186] env[68437]: value = "task-2945183" [ 1315.355186] env[68437]: _type = "Task" [ 1315.355186] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.360612] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299ab5ed-34f8-4c16-a5ea-73d8bfe4b5cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.365460] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945183, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.381643] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9e4ee5-09a4-42b4-b89c-e22d6c3074fd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.397182] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] The volume has not been displaced from its original location: [datastore1] volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561/volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1315.402448] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1315.403113] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37d4c311-1110-4ec1-b796-738c6851e65c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.420087] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1315.420087] env[68437]: value = "task-2945184" [ 1315.420087] env[68437]: _type = "Task" [ 1315.420087] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.427716] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945184, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.517359] env[68437]: DEBUG oslo_vmware.api [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Task: {'id': task-2945182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135137} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.517625] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1315.517817] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1315.517997] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1315.518192] env[68437]: INFO nova.compute.manager [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Took 1.11 seconds to destroy the instance on the hypervisor. 
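The volume-detach and destroy records above are all driven through oslo.vmware's session layer: each "Invoking ..." line is a SOAP call issued via the session, and each "Waiting for the task ... / Task ... completed successfully" pair comes from its task-polling helper. Below is a minimal sketch of that invoke-and-poll pattern; the vCenter endpoint, credentials, and the 'vm-123' moref value are placeholders, and Nova's vmwareapi driver wraps the same session object in its own helpers rather than calling it like this directly.

# Minimal sketch (placeholder host/credentials/moref) of the oslo.vmware
# pattern behind the "Invoking ...", "Waiting for the task: ... to complete."
# and "Task: {...} completed successfully." records in this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test',        # vCenter host -- placeholder
    'user@vsphere.local',     # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # interval used by _poll_task above

# Nova resolves the managed object reference via PropertyCollector /
# SearchIndex lookups; 'vm-123' is just an illustrative moref value.
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# invoke_api() issues the SOAP request; wait_for_task() polls the returned
# task until it reaches a terminal state.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' when the task finished cleanly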
[ 1315.518443] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1315.518638] env[68437]: DEBUG nova.compute.manager [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1315.518734] env[68437]: DEBUG nova.network.neutron [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1315.869572] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945183, 'name': ReconfigVM_Task, 'duration_secs': 0.201949} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.869953] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1315.882109] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a193dc9-611c-4f07-833e-a5119477dc16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.904023] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1315.904023] env[68437]: value = "task-2945185" [ 1315.904023] env[68437]: _type = "Task" [ 1315.904023] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.912459] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945185, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.930988] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945184, 'name': ReconfigVM_Task, 'duration_secs': 0.156873} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.931296] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1315.936580] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31f05658-7924-4b9e-bd0a-5819011c2a3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.952178] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1315.952178] env[68437]: value = "task-2945186" [ 1315.952178] env[68437]: _type = "Task" [ 1315.952178] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.961333] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945186, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.978149] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "3562f6dc-2596-4878-96f5-1e0da54a168b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1315.978382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.184875] env[68437]: DEBUG nova.compute.manager [req-36aabdfa-4f59-4e7d-9bb4-f793428e3f7d req-b543d44b-2cc1-418b-8e8a-e4318cf14b92 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Received event network-vif-deleted-a072479e-e965-4e09-a378-229474b176e6 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1316.185160] env[68437]: INFO nova.compute.manager [req-36aabdfa-4f59-4e7d-9bb4-f793428e3f7d req-b543d44b-2cc1-418b-8e8a-e4318cf14b92 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Neutron deleted interface a072479e-e965-4e09-a378-229474b176e6; detaching it from the instance and deleting it from the info cache [ 1316.185438] env[68437]: DEBUG nova.network.neutron [req-36aabdfa-4f59-4e7d-9bb4-f793428e3f7d req-b543d44b-2cc1-418b-8e8a-e4318cf14b92 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1316.414173] env[68437]: DEBUG oslo_vmware.api [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945185, 'name': ReconfigVM_Task, 'duration_secs': 0.170727} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.414498] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591154', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'name': 'volume-06ba57fb-3c82-4175-b717-ece486ba640e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7705f1c5-3b96-426c-9553-b67f2951825b', 'attached_at': '', 'detached_at': '', 'volume_id': '06ba57fb-3c82-4175-b717-ece486ba640e', 'serial': '06ba57fb-3c82-4175-b717-ece486ba640e'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1316.461924] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945186, 'name': ReconfigVM_Task, 'duration_secs': 0.143156} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.462315] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591146', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'name': 'volume-d9bee34e-2dd4-4803-aafb-0bd871f3a561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '0a246b14-5078-4549-a270-73f99a1647c7', 'attached_at': '2025-03-11T18:48:07.000000', 'detached_at': '', 'volume_id': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561', 'serial': 'd9bee34e-2dd4-4803-aafb-0bd871f3a561'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1316.462602] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1316.463381] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9fec12-c81c-46df-8ec5-19839d198795 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.469749] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Unregistering the VM {{(pid=68437) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.469974] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b74e7e6e-9930-4b48-af64-31830597ac7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.480865] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1316.530994] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.531235] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.531508] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore1] 0a246b14-5078-4549-a270-73f99a1647c7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.531831] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdb40322-d52f-4b47-9d9e-a887d0b2dd92 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.538254] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1316.538254] env[68437]: value = "task-2945188" [ 1316.538254] env[68437]: _type = "Task" [ 1316.538254] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.545680] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945188, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.666664] env[68437]: DEBUG nova.network.neutron [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.688179] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a51134f5-4616-4e38-b4f8-3c8c090d4599 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.697633] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119b8667-f1e5-40cc-8e81-5970da9c8670 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.726053] env[68437]: DEBUG nova.compute.manager [req-36aabdfa-4f59-4e7d-9bb4-f793428e3f7d req-b543d44b-2cc1-418b-8e8a-e4318cf14b92 service nova] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Detach interface failed, port_id=a072479e-e965-4e09-a378-229474b176e6, reason: Instance e2143e07-8c8d-4008-bb73-29aae91baee7 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1316.958067] env[68437]: DEBUG nova.objects.instance [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'flavor' on Instance uuid 7705f1c5-3b96-426c-9553-b67f2951825b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1317.003276] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.003596] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.005686] env[68437]: INFO nova.compute.claims [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1317.051722] env[68437]: DEBUG oslo_vmware.api [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110011} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.052096] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1317.052341] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1317.052529] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1317.052715] env[68437]: INFO nova.compute.manager [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Took 2.28 seconds to destroy the instance on the hypervisor. [ 1317.052953] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1317.053161] env[68437]: DEBUG nova.compute.manager [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1317.053256] env[68437]: DEBUG nova.network.neutron [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1317.169070] env[68437]: INFO nova.compute.manager [-] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Took 1.65 seconds to deallocate network for instance. 
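The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return." lines come from oslo.service's RetryDecorator, which runs the wrapped function inside a looping call and retries it on selected exceptions before the "Deallocating network for instance" work completes. A hedged sketch of that pattern follows; the exception type and retry numbers are illustrative, not the values Nova actually configures.

# Illustrative use of oslo.service's RetryDecorator (the wrapper that logs
# "Waiting for function ... to return." while the looping call runs).
from oslo_service import loopingcall


class TransientNetworkError(Exception):
    """Stand-in for the transient errors the real code retries on."""


@loopingcall.RetryDecorator(
    max_retry_count=3,      # illustrative retry budget
    inc_sleep_time=2,       # sleep grows by 2s per failed attempt
    max_sleep_time=10,
    exceptions=(TransientNetworkError,))
def deallocate_network_with_retries():
    # Placeholder body; in Nova this calls out to Neutron to unbind and
    # delete the instance's ports.
    print('deallocating network for the instance')


# Calling the decorated function runs it inside a looping call until it
# returns or the retry budget is exhausted.
deallocate_network_with_retries()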
[ 1317.675645] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.966064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2a15f7f6-7a1f-4c02-ab92-1fed31cf266d tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.254s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1317.984564] env[68437]: DEBUG nova.network.neutron [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.110484] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a39fa3-8c8c-46ca-ab3a-83990adb9497 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.117994] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a6d835-06b8-44c0-892c-6c75733b994f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.147270] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df73009-8a7e-4c71-aebe-4f827f07482a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.154269] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38375cbf-16aa-4be7-b270-c7b2f09c5fd9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.168448] env[68437]: DEBUG nova.compute.provider_tree [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.212412] env[68437]: DEBUG nova.compute.manager [req-b2b4a0c8-e712-4373-8bda-7848f61159b2 req-c670cf5e-6b29-41ec-83d4-6383a0686c5a service nova] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Received event network-vif-deleted-505ca14d-2a80-4b29-bd5c-9d991541e9ad {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1318.486706] env[68437]: INFO nova.compute.manager [-] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Took 1.43 seconds to deallocate network for instance. 
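The recurring 'Acquiring lock "..." by "..."', 'Lock "..." acquired ... waited N s' and 'Lock "..." "released" ... held N s' records (the per-instance UUID locks, "compute_resources", the vmware API lock) are the standard oslo.concurrency lockutils pattern. A small sketch with an illustrative critical section:

# Sketch of the oslo.concurrency locking that produces the
# acquired/waited/released DEBUG lines above; the lock name matches the
# "compute_resources" lock in this log, the body is illustrative.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the internal "compute_resources" semaphore held; concurrent
    # callers block here, and waited/held durations are logged at DEBUG.
    time.sleep(0.1)


update_usage()

# Equivalent context-manager form for ad-hoc critical sections.
with lockutils.lock('compute_resources'):
    pass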
[ 1318.672059] env[68437]: DEBUG nova.scheduler.client.report [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1318.965455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.965739] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.965953] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.966161] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.966394] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.968597] env[68437]: INFO nova.compute.manager [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Terminating instance [ 1319.025665] env[68437]: INFO nova.compute.manager [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 
tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Took 0.54 seconds to detach 1 volumes for instance. [ 1319.027657] env[68437]: DEBUG nova.compute.manager [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Deleting volume: d9bee34e-2dd4-4803-aafb-0bd871f3a561 {{(pid=68437) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1319.176080] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.172s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.176586] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1319.179342] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.504s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.179564] env[68437]: DEBUG nova.objects.instance [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lazy-loading 'resources' on Instance uuid e2143e07-8c8d-4008-bb73-29aae91baee7 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1319.472229] env[68437]: DEBUG nova.compute.manager [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1319.472475] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1319.473391] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad941bf-cb35-46c2-b115-5d3abe4ba43b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.480841] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1319.481072] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cacb9f10-59bc-4237-be38-62dafe9e11b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.488039] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1319.488039] env[68437]: value = "task-2945190" [ 1319.488039] env[68437]: _type = "Task" [ 1319.488039] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.494968] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945190, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.562291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.601474] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.601820] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.602040] env[68437]: INFO nova.compute.manager [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Shelving [ 1319.682587] env[68437]: DEBUG nova.compute.utils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1319.687016] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1319.687195] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1319.724478] env[68437]: DEBUG nova.policy [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e4b1b3012874778bc147c3e7b00133c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6f6382f6c6843529a37d7c62837523a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1319.776560] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04046f3a-d971-4d53-86fe-3a19f5b15035 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.784195] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e7ca97-91fb-4cab-bf2f-094e75c1ae8a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.813695] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07157528-d174-4f6f-a76f-c88ce3834684 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.820599] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c867fbea-898d-4197-bf02-ba134e00bea2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.833369] env[68437]: DEBUG nova.compute.provider_tree [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.993366] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Successfully created port: e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1320.000415] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945190, 'name': PowerOffVM_Task, 'duration_secs': 0.181104} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.000678] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1320.000855] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1320.001111] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e362a98a-71da-409b-b2ff-0553ff64cf39 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.064585] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1320.064797] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1320.064976] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleting the datastore file [datastore1] 7705f1c5-3b96-426c-9553-b67f2951825b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1320.065251] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ee241a6-8ca5-430e-bd35-e3cabd628e06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.071278] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for the task: (returnval){ [ 1320.071278] env[68437]: value = "task-2945192" [ 1320.071278] env[68437]: _type = "Task" [ 1320.071278] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.078949] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945192, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.188189] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1320.336934] env[68437]: DEBUG nova.scheduler.client.report [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1320.582574] env[68437]: DEBUG oslo_vmware.api [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Task: {'id': task-2945192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164733} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.583272] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.583379] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1320.583589] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1320.583821] env[68437]: INFO nova.compute.manager [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1320.584127] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1320.584344] env[68437]: DEBUG nova.compute.manager [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1320.584457] env[68437]: DEBUG nova.network.neutron [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1320.611134] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.611859] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f24d4a4-4a95-474b-ba05-b3eb294fcd3d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.620510] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1320.620510] env[68437]: value = "task-2945193" [ 1320.620510] env[68437]: _type = "Task" [ 1320.620510] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.628993] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945193, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.841624] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.844351] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.282s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.844613] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.865644] env[68437]: INFO nova.scheduler.client.report [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocations for instance 0a246b14-5078-4549-a270-73f99a1647c7 [ 1320.867407] env[68437]: INFO nova.scheduler.client.report [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Deleted allocations for instance e2143e07-8c8d-4008-bb73-29aae91baee7 [ 1321.018912] env[68437]: DEBUG nova.compute.manager [req-fa2d78bd-20e4-4c35-b540-e4aaa76f7500 req-a9d059f0-92d7-4cc5-850c-d846aa87f669 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Received event network-vif-deleted-e681ca7d-a952-4802-bfde-864f7a8362b2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1321.019151] env[68437]: INFO nova.compute.manager [req-fa2d78bd-20e4-4c35-b540-e4aaa76f7500 req-a9d059f0-92d7-4cc5-850c-d846aa87f669 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Neutron deleted interface e681ca7d-a952-4802-bfde-864f7a8362b2; detaching it from the instance and deleting it from the info cache [ 1321.019365] env[68437]: DEBUG nova.network.neutron [req-fa2d78bd-20e4-4c35-b540-e4aaa76f7500 req-a9d059f0-92d7-4cc5-850c-d846aa87f669 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.130945] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945193, 'name': PowerOffVM_Task, 'duration_secs': 0.223748} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.133326] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.133326] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6c65f8-9968-4e58-928d-6c0c56b9c05d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.152640] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d269c7c7-2d6a-4bc8-9aa4-47c53f0cbe76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.198722] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1321.230216] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1321.230634] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.230634] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1321.230804] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.230951] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1321.231123] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1321.231342] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1321.231500] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1321.231777] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1321.231859] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1321.232030] env[68437]: DEBUG nova.virt.hardware [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1321.232887] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfabb58-bff2-4b34-9229-50cc4198bc9f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.241149] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471d66c7-a791-4df4-bd16-3edc96c9a10b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.378446] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b032a084-7c92-4a3e-97bb-e88102b88e93 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "0a246b14-5078-4549-a270-73f99a1647c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.113s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.382246] env[68437]: DEBUG oslo_concurrency.lockutils [None req-cf8ae377-afbc-4bb2-8448-24602b870ce9 tempest-AttachInterfacesTestJSON-564155145 tempest-AttachInterfacesTestJSON-564155145-project-member] Lock 
"e2143e07-8c8d-4008-bb73-29aae91baee7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.481s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.504188] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Successfully updated port: e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1321.504188] env[68437]: DEBUG nova.network.neutron [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.526830] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb806d53-00ed-411f-a950-9fcc8d34b377 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.542254] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8b1f2b-8f15-4d88-b33b-52d5d796123b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.570798] env[68437]: DEBUG nova.compute.manager [req-fa2d78bd-20e4-4c35-b540-e4aaa76f7500 req-a9d059f0-92d7-4cc5-850c-d846aa87f669 service nova] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Detach interface failed, port_id=e681ca7d-a952-4802-bfde-864f7a8362b2, reason: Instance 7705f1c5-3b96-426c-9553-b67f2951825b could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1321.661975] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1321.662371] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e18257fe-22e4-4f80-9171-d1f992267d08 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.670056] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1321.670056] env[68437]: value = "task-2945194" [ 1321.670056] env[68437]: _type = "Task" [ 1321.670056] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.680366] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945194, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.010046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.010046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1322.010046] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1322.010046] env[68437]: INFO nova.compute.manager [-] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Took 1.43 seconds to deallocate network for instance. [ 1322.032697] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "ede62837-4ff5-44be-a015-9ea06b9126a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.033124] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.033204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.033382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.033551] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.036013] env[68437]: INFO nova.compute.manager [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Terminating instance [ 1322.180402] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.515952] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.516260] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.516496] env[68437]: DEBUG nova.objects.instance [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lazy-loading 'resources' on Instance uuid 7705f1c5-3b96-426c-9553-b67f2951825b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1322.540464] env[68437]: DEBUG nova.compute.manager [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1322.540464] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1322.541441] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9b7283-2f91-45ff-9c6b-2dd3e7d5569c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.549954] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1322.550237] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbe6cdaf-1eb8-491b-969c-ce12d5c04b01 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.552409] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1322.558987] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1322.558987] env[68437]: value = "task-2945195" [ 1322.558987] env[68437]: _type = "Task" [ 1322.558987] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.568786] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.681692] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945194, 'name': CreateSnapshot_Task, 'duration_secs': 0.715307} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.681989] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1322.682838] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d40ae7-041a-45ea-a7cb-174ab831f1d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.772547] env[68437]: DEBUG nova.network.neutron [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.051900] env[68437]: DEBUG nova.compute.manager [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Received event network-vif-plugged-e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1323.051900] env[68437]: DEBUG oslo_concurrency.lockutils [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Acquiring lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.051900] env[68437]: DEBUG oslo_concurrency.lockutils [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.051900] env[68437]: DEBUG oslo_concurrency.lockutils 
[req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.052268] env[68437]: DEBUG nova.compute.manager [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] No waiting events found dispatching network-vif-plugged-e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1323.052326] env[68437]: WARNING nova.compute.manager [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Received unexpected event network-vif-plugged-e8fbb16d-1663-49af-b989-d8e689d060c7 for instance with vm_state building and task_state spawning. [ 1323.052539] env[68437]: DEBUG nova.compute.manager [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Received event network-changed-e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1323.052792] env[68437]: DEBUG nova.compute.manager [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Refreshing instance network info cache due to event network-changed-e8fbb16d-1663-49af-b989-d8e689d060c7. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1323.052858] env[68437]: DEBUG oslo_concurrency.lockutils [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Acquiring lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.069414] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945195, 'name': PowerOffVM_Task, 'duration_secs': 0.196397} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.072015] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.072219] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1323.072668] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0428d40a-5fb4-4ffd-9d22-c1da0f28f5de {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.123998] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3256553b-47a5-4505-8b62-26931c3d6d57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.132155] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae304296-4c8b-42d5-a4d5-4b97e23493b2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.136457] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1323.136666] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1323.136842] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the datastore file [datastore1] ede62837-4ff5-44be-a015-9ea06b9126a5 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1323.137435] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b64d6fd6-cc06-4bec-8aa3-9b82b2f6361a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.164435] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0289bcd2-5321-4de9-8bc0-4366e57f94c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.168428] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1323.168428] 
env[68437]: value = "task-2945197" [ 1323.168428] env[68437]: _type = "Task" [ 1323.168428] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.175752] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd1fa3f-2727-42ed-89b1-9b7c71d0abed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.183208] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.192456] env[68437]: DEBUG nova.compute.provider_tree [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.201971] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1323.202310] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bb5acde9-5030-4c59-99af-4425b526dd3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.209975] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1323.209975] env[68437]: value = "task-2945198" [ 1323.209975] env[68437]: _type = "Task" [ 1323.209975] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.219679] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945198, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.275572] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.275929] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Instance network_info: |[{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1323.276295] env[68437]: DEBUG oslo_concurrency.lockutils [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Acquired lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.276487] env[68437]: DEBUG nova.network.neutron [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Refreshing network info cache for port e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1323.278788] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:15:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8fbb16d-1663-49af-b989-d8e689d060c7', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.285341] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1323.286313] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1323.286560] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6fd4352c-63d5-4b6f-800f-ffaa768e6468 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.305938] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.305938] env[68437]: value = "task-2945199" [ 1323.305938] env[68437]: _type = "Task" [ 1323.305938] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.313930] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945199, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.679178] env[68437]: DEBUG oslo_vmware.api [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142988} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.679447] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1323.679631] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1323.679820] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1323.680054] env[68437]: INFO nova.compute.manager [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1323.680330] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1323.680571] env[68437]: DEBUG nova.compute.manager [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1323.680659] env[68437]: DEBUG nova.network.neutron [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1323.696125] env[68437]: DEBUG nova.scheduler.client.report [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1323.721551] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945198, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.816551] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945199, 'name': CreateVM_Task, 'duration_secs': 0.489056} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.816783] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.820877] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.820877] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.820877] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1323.820877] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba83502c-f974-4ffd-8cc4-c3b2f47d6c13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.822784] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1323.822784] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f62f4d-9719-226c-4cb2-c42f7e601922" [ 1323.822784] env[68437]: _type = "Task" [ 1323.822784] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.830542] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f62f4d-9719-226c-4cb2-c42f7e601922, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.093922] env[68437]: DEBUG nova.network.neutron [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updated VIF entry in instance network info cache for port e8fbb16d-1663-49af-b989-d8e689d060c7. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1324.094357] env[68437]: DEBUG nova.network.neutron [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.201392] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.221172] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945198, 'name': CloneVM_Task, 'duration_secs': 0.946092} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.221782] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Created linked-clone VM from snapshot [ 1324.222502] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabdaf7b-675e-48f5-8ba0-f30868695613 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.232521] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Uploading image 706b952b-a1ff-4ca2-a65f-b55624309a9e {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1324.235157] env[68437]: INFO nova.scheduler.client.report [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Deleted allocations for instance 7705f1c5-3b96-426c-9553-b67f2951825b [ 1324.257798] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1324.257798] env[68437]: value = "vm-591156" [ 1324.257798] env[68437]: _type = "VirtualMachine" [ 1324.257798] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1324.258081] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-277ad6f2-82d4-479d-880e-53a0274acbb7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.265516] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease: (returnval){ [ 1324.265516] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c209-5a09-fe6e-5581-25cba8d36a66" [ 1324.265516] env[68437]: _type = "HttpNfcLease" [ 1324.265516] env[68437]: } obtained for exporting VM: (result){ [ 1324.265516] env[68437]: value = "vm-591156" [ 1324.265516] env[68437]: _type = "VirtualMachine" [ 1324.265516] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1324.265895] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the lease: (returnval){ [ 1324.265895] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c209-5a09-fe6e-5581-25cba8d36a66" [ 1324.265895] env[68437]: _type = "HttpNfcLease" [ 1324.265895] env[68437]: } to be ready. 
{{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1324.272102] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1324.272102] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c209-5a09-fe6e-5581-25cba8d36a66" [ 1324.272102] env[68437]: _type = "HttpNfcLease" [ 1324.272102] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1324.333486] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52f62f4d-9719-226c-4cb2-c42f7e601922, 'name': SearchDatastore_Task, 'duration_secs': 0.009471} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.333803] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.334046] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.334346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.334517] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.334735] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1324.335010] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99af8e6e-90c8-46c1-a46c-edd3187e2f3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.347382] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1324.347555] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1324.348292] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea3755a4-7589-40b3-9682-0623eb63fc78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.354220] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1324.354220] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523e8000-d344-5458-7e4a-461dfa24a134" [ 1324.354220] env[68437]: _type = "Task" [ 1324.354220] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.362152] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523e8000-d344-5458-7e4a-461dfa24a134, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.573665] env[68437]: DEBUG nova.network.neutron [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.596921] env[68437]: DEBUG oslo_concurrency.lockutils [req-69125858-2f57-401b-8a37-209cbb7f84e7 req-250b9f3e-6cf5-4300-99c9-19b31cd2a572 service nova] Releasing lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.742437] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a7832683-6104-4030-9133-09d65d1bfe6e tempest-AttachVolumeNegativeTest-1237161712 tempest-AttachVolumeNegativeTest-1237161712-project-member] Lock "7705f1c5-3b96-426c-9553-b67f2951825b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.777s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.773935] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1324.773935] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c209-5a09-fe6e-5581-25cba8d36a66" [ 1324.773935] env[68437]: _type = "HttpNfcLease" [ 1324.773935] env[68437]: } is ready. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1324.774452] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1324.774452] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e8c209-5a09-fe6e-5581-25cba8d36a66" [ 1324.774452] env[68437]: _type = "HttpNfcLease" [ 1324.774452] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1324.775221] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780fb4bc-d65f-43af-b507-dfe9bb266136 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.782976] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1324.783197] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1324.865572] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523e8000-d344-5458-7e4a-461dfa24a134, 'name': SearchDatastore_Task, 'duration_secs': 0.00847} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.866375] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d3e6540-e3c4-4f4d-96fc-63f420723bb0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.871840] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1324.871840] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52777724-4bc5-d4f4-c8f4-41872927cdcf" [ 1324.871840] env[68437]: _type = "Task" [ 1324.871840] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.879967] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52777724-4bc5-d4f4-c8f4-41872927cdcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.884276] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-37a74863-deef-4220-a925-3f466e435975 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.077206] env[68437]: INFO nova.compute.manager [-] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Took 1.40 seconds to deallocate network for instance. [ 1325.091338] env[68437]: DEBUG nova.compute.manager [req-97b42a5b-5e19-4583-9f4b-94cc100eb991 req-854ad360-528a-4b90-b358-a9867e1ea6a1 service nova] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Received event network-vif-deleted-bce31050-7ee6-4cbb-8b64-b2bf9be3c649 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1325.384258] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52777724-4bc5-d4f4-c8f4-41872927cdcf, 'name': SearchDatastore_Task, 'duration_secs': 0.011592} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.384679] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.384913] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3562f6dc-2596-4878-96f5-1e0da54a168b/3562f6dc-2596-4878-96f5-1e0da54a168b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1325.386105] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b6aa783-bc72-45e7-928c-4ddfebee7635 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.392708] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1325.392708] env[68437]: value = "task-2945201" [ 1325.392708] env[68437]: _type = "Task" [ 1325.392708] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.406546] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945201, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.593271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.593661] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.594127] env[68437]: DEBUG nova.objects.instance [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'resources' on Instance uuid ede62837-4ff5-44be-a015-9ea06b9126a5 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1325.903268] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945201, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.170387] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c00d40-c9c8-469a-97ff-6c7dfee5ad35 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.178095] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcfc519-ef24-407e-baa0-a218408031ee {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.212598] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63cb482f-362e-428b-a74d-b76da56d5b99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.220362] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e4f863-1d11-4536-81e3-31e5d19f3ab9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.234122] env[68437]: DEBUG nova.compute.provider_tree [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.405337] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52173} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.406155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 3562f6dc-2596-4878-96f5-1e0da54a168b/3562f6dc-2596-4878-96f5-1e0da54a168b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1326.406563] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1326.407056] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1239c85a-e076-45a2-ae9c-6bbab6cbe72e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.416063] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1326.416063] env[68437]: value = "task-2945203" [ 1326.416063] env[68437]: _type = "Task" [ 1326.416063] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.429561] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.738023] env[68437]: DEBUG nova.scheduler.client.report [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1326.928014] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167672} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.928381] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.929347] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229b33df-26fe-4486-b722-8f4d99063131 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.952161] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 3562f6dc-2596-4878-96f5-1e0da54a168b/3562f6dc-2596-4878-96f5-1e0da54a168b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.952560] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d99205d-e059-441b-9241-d336ef72e30a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.975116] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1326.975116] env[68437]: value = "task-2945204" [ 1326.975116] env[68437]: _type = "Task" [ 1326.975116] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.984298] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945204, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.245878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.272787] env[68437]: INFO nova.scheduler.client.report [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocations for instance ede62837-4ff5-44be-a015-9ea06b9126a5 [ 1327.485499] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945204, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.780492] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b87b3cb4-70eb-4acb-8373-e96e6fe6f16f tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "ede62837-4ff5-44be-a015-9ea06b9126a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.747s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1327.975751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "88191506-b278-4502-b72d-07169f4fd6a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1327.975921] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1327.988254] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945204, 'name': ReconfigVM_Task, 'duration_secs': 0.954985} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.989831] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 3562f6dc-2596-4878-96f5-1e0da54a168b/3562f6dc-2596-4878-96f5-1e0da54a168b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1327.990566] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14c1b7de-51d2-4176-9ec2-d48afafa893a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.997225] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1327.997225] env[68437]: value = "task-2945206" [ 1327.997225] env[68437]: _type = "Task" [ 1327.997225] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.006540] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945206, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.483233] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1328.507073] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945206, 'name': Rename_Task, 'duration_secs': 0.167093} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.507428] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1328.507647] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2caa5fc4-a7d5-4f66-ae79-9ec73f2177ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.514306] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1328.514306] env[68437]: value = "task-2945208" [ 1328.514306] env[68437]: _type = "Task" [ 1328.514306] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.522433] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945208, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.007912] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.008245] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.009919] env[68437]: INFO nova.compute.claims [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1329.024191] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945208, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.032800] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.231677] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.231677] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.231677] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.231862] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1329.526696] env[68437]: DEBUG oslo_vmware.api [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945208, 'name': PowerOnVM_Task, 'duration_secs': 0.617837} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.527680] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1329.527885] env[68437]: INFO nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Took 8.33 seconds to spawn the instance on the hypervisor. [ 1329.528080] env[68437]: DEBUG nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1329.528876] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8929d46f-80b2-4a89-8318-285818404a21 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.045792] env[68437]: INFO nova.compute.manager [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Took 13.06 seconds to build instance. [ 1330.092679] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0aff409-ff6b-45aa-9638-ac42a9195c8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.101260] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534901ff-178f-476f-b3dc-de9e83a942d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.131634] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e3e589-1ade-4af5-975d-4bfdb405ba2e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.139280] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353db956-ec99-4e00-9f5e-ab7b38738858 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.152871] env[68437]: DEBUG nova.compute.provider_tree [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.226647] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.323539] env[68437]: DEBUG nova.compute.manager [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 
req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Received event network-changed-e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1330.323833] env[68437]: DEBUG nova.compute.manager [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Refreshing instance network info cache due to event network-changed-e8fbb16d-1663-49af-b989-d8e689d060c7. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1330.325075] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] Acquiring lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.325075] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] Acquired lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1330.325242] env[68437]: DEBUG nova.network.neutron [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Refreshing network info cache for port e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1330.551705] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4414ab08-2118-41d5-8ea5-03ce421cd2f3 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.573s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.656083] env[68437]: DEBUG nova.scheduler.client.report [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1330.733607] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.831792] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.832220] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.832398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.832620] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.832812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.835045] env[68437]: INFO nova.compute.manager [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Terminating instance [ 1331.057359] env[68437]: DEBUG nova.network.neutron [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updated VIF entry in instance network info cache for port e8fbb16d-1663-49af-b989-d8e689d060c7. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1331.057717] env[68437]: DEBUG nova.network.neutron [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.161575] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.153s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.162345] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1331.338716] env[68437]: DEBUG nova.compute.manager [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1331.338900] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.340173] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768df9a3-5367-4943-9554-85ee5d6b76ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.347739] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.347963] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79483a2c-27f6-4bef-ae5a-46687351982d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.354586] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1331.354586] env[68437]: value = "task-2945210" [ 1331.354586] env[68437]: _type = "Task" [ 1331.354586] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.362350] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945210, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.561127] env[68437]: DEBUG oslo_concurrency.lockutils [req-3a04d913-1c7b-4279-9e6c-d60ac503eb96 req-0f6f6a0b-678d-4403-815d-5025a2134ec8 service nova] Releasing lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1331.669039] env[68437]: DEBUG nova.compute.utils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1331.670638] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1331.670830] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1331.716832] env[68437]: DEBUG nova.policy [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b69484848fd4c408d96e456b030f018', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5e76db8ccf4b4e6fa4028ffa815ff3ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1331.864758] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945210, 'name': PowerOffVM_Task, 'duration_secs': 0.387706} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.865090] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1331.865274] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1331.865553] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6ba490a-1e4a-4941-b8f6-3841d72b7173 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.933252] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1331.933570] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1331.933861] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleting the 
datastore file [datastore1] 353ebb37-7e69-49d4-873e-2272cbfff6e8 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1331.934178] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1905576-c6db-44d6-9c5b-568aa3c82c56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.943021] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for the task: (returnval){ [ 1331.943021] env[68437]: value = "task-2945212" [ 1331.943021] env[68437]: _type = "Task" [ 1331.943021] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.952390] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.999628] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Successfully created port: 59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1332.174268] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1332.451431] env[68437]: DEBUG oslo_vmware.api [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Task: {'id': task-2945212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.393283} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.451702] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.451863] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.452053] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.452265] env[68437]: INFO nova.compute.manager [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1332.452519] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1332.452715] env[68437]: DEBUG nova.compute.manager [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1332.452808] env[68437]: DEBUG nova.network.neutron [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1332.840992] env[68437]: DEBUG nova.compute.manager [req-723ff47e-e7a4-4fda-a35d-96d688ae44ef req-74346ba1-dd25-4b55-ade7-7c264ac67903 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Received event network-vif-deleted-28d9f2cf-baaf-4817-acdb-525b41381e45 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1332.841234] env[68437]: INFO nova.compute.manager [req-723ff47e-e7a4-4fda-a35d-96d688ae44ef req-74346ba1-dd25-4b55-ade7-7c264ac67903 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Neutron deleted interface 28d9f2cf-baaf-4817-acdb-525b41381e45; detaching it from the instance and deleting it from the info cache [ 1332.841389] env[68437]: DEBUG nova.network.neutron [req-723ff47e-e7a4-4fda-a35d-96d688ae44ef req-74346ba1-dd25-4b55-ade7-7c264ac67903 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.184909] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1333.208110] env[68437]: DEBUG nova.network.neutron [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.211349] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1333.211603] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1333.211765] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1333.211953] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1333.212135] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1333.212304] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1333.212508] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1333.212670] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 
tempest-ServersTestManualDisk-297762748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1333.212838] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1333.213121] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1333.213787] env[68437]: DEBUG nova.virt.hardware [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1333.214314] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571dbc9a-5e96-434a-b0f3-49abc3f8e066 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.224414] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630f2cb7-b7ce-4b60-af7d-01e30e923403 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.237927] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.238593] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.344383] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6b8823b-db25-49e7-9fd4-cfc129ebe2a6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.357151] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b08ea5-ca39-4dcb-8031-7d4b473f75e1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.393429] env[68437]: DEBUG nova.compute.manager [req-723ff47e-e7a4-4fda-a35d-96d688ae44ef req-74346ba1-dd25-4b55-ade7-7c264ac67903 service nova] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Detach interface failed, port_id=28d9f2cf-baaf-4817-acdb-525b41381e45, reason: Instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1333.450081] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Successfully updated port: 59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1333.718213] env[68437]: INFO nova.compute.manager [-] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Took 1.27 seconds to deallocate network for instance. [ 1333.953532] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.953812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquired lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1333.953864] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1334.225657] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.226046] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.226365] env[68437]: DEBUG nova.objects.instance [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lazy-loading 'resources' on Instance uuid 353ebb37-7e69-49d4-873e-2272cbfff6e8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.230024] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.489028] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1334.626161] env[68437]: DEBUG nova.network.neutron [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updating instance_info_cache with network_info: [{"id": "59123504-f773-43df-9389-0f86ff9b7e52", "address": "fa:16:3e:6a:aa:42", "network": {"id": "37c0e28f-b9ac-482c-a2a6-bee79d99c1a1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1238628437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e76db8ccf4b4e6fa4028ffa815ff3ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59123504-f7", "ovs_interfaceid": "59123504-f773-43df-9389-0f86ff9b7e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.734056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.762645] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1334.763730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e635b9-1bdd-4c5f-9ce9-321ebd0e9c04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.773327] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1334.773528] env[68437]: ERROR oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk due to incomplete transfer. 
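The records just above show the tail of the stream-optimized image upload for instance 6d4f617c-97d6-4721-97c0-7a6b8676d681: oslo_vmware.rw_handles checks the HttpNfcLease backing the VMDK export, finds it still in state "ready" once the read handle is finished with it, and therefore aborts the lease before closing the handle and reporting the Glance upload. Below is a minimal sketch of that release decision, assuming oslo.vmware's VMwareAPISession.invoke_api, its session.vim client, and vim_util.get_object_property behave as in the library; the helper name release_read_lease and the plain string comparison on the lease state are illustrative assumptions, not the actual oslo_vmware.rw_handles implementation.

    # Sketch only: mirrors the lease-release decision visible in the log
    # (lease still "ready" -> abort; fully drained lease -> complete).
    from oslo_vmware import vim_util


    def release_read_lease(session, lease):
        """Complete or abort an HttpNfcLease after a VMDK read finishes."""
        # Assumption: the lease state is readable as the 'state' property of
        # the HttpNfcLease managed object, as a plain string.
        state = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, lease, 'state')
        if state == 'ready':
            # The reader closed before the lease reached 100%; abort it so
            # vCenter tears the export down, matching the "Aborting lease ...
            # due to incomplete transfer" record above.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)
        elif state == 'done':
            # Every byte the lease expected was transferred; mark it complete.
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            raise RuntimeError('unexpected HttpNfcLease state: %s' % state)

Either way the lease must be closed out: while it stays open, vCenter keeps the NFC export on the ESX host (esx7c1n2 in this run) active, and an explicit abort is the signal that the transfer will not be finished rather than an error in the upload itself.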
[ 1334.773775] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6f627bf6-4196-4e6f-ac8c-13b933e74b40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.784108] env[68437]: DEBUG oslo_vmware.rw_handles [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ef49b4-2313-92bb-91fd-2df01db6f8e4/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1334.784108] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Uploaded image 706b952b-a1ff-4ca2-a65f-b55624309a9e to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1334.784108] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1334.784432] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5f33ab38-80d0-4916-ab56-79a2b70d349a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.789959] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1334.789959] env[68437]: value = "task-2945213" [ 1334.789959] env[68437]: _type = "Task" [ 1334.789959] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.802022] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945213, 'name': Destroy_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.804225] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4963bb71-8040-4cb0-b6c0-eecc36ea86ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.810869] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e5cf3a-73b7-469a-9216-4d729209767d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.841972] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39baf00-da24-4f07-a7b1-5bf071309390 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.849582] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b0bc75-23cd-4b8a-b708-bf01f3f6e755 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.864093] env[68437]: DEBUG nova.compute.provider_tree [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1334.870770] env[68437]: DEBUG nova.compute.manager [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Received event network-vif-plugged-59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1334.870984] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Acquiring lock "88191506-b278-4502-b72d-07169f4fd6a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.871199] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Lock "88191506-b278-4502-b72d-07169f4fd6a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.871364] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Lock "88191506-b278-4502-b72d-07169f4fd6a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.871574] env[68437]: DEBUG nova.compute.manager [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] No waiting events found dispatching network-vif-plugged-59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1334.871890] env[68437]: 
WARNING nova.compute.manager [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Received unexpected event network-vif-plugged-59123504-f773-43df-9389-0f86ff9b7e52 for instance with vm_state building and task_state spawning. [ 1334.872024] env[68437]: DEBUG nova.compute.manager [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Received event network-changed-59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1334.872194] env[68437]: DEBUG nova.compute.manager [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Refreshing instance network info cache due to event network-changed-59123504-f773-43df-9389-0f86ff9b7e52. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1334.872409] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Acquiring lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.130835] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Releasing lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.131113] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Instance network_info: |[{"id": "59123504-f773-43df-9389-0f86ff9b7e52", "address": "fa:16:3e:6a:aa:42", "network": {"id": "37c0e28f-b9ac-482c-a2a6-bee79d99c1a1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1238628437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e76db8ccf4b4e6fa4028ffa815ff3ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59123504-f7", "ovs_interfaceid": "59123504-f773-43df-9389-0f86ff9b7e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1335.131691] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Acquired lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.131886] env[68437]: DEBUG nova.network.neutron [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Refreshing network info cache for port 59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1335.133140] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:aa:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8edfde4-5a99-4745-956d-04da82ab1b85', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59123504-f773-43df-9389-0f86ff9b7e52', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.140698] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Creating folder: Project (5e76db8ccf4b4e6fa4028ffa815ff3ec). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.144022] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b498cb3-620b-4ac2-b4c6-4e9ba922ed54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.155972] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Created folder: Project (5e76db8ccf4b4e6fa4028ffa815ff3ec) in parent group-v590848. [ 1335.156182] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Creating folder: Instances. Parent ref: group-v591158. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.156535] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-542865ff-1d2d-4fad-b6ac-e938409478dd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.165930] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Created folder: Instances in parent group-v591158. [ 1335.166189] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1335.166409] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.166629] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82ea82d3-371a-4028-89f7-2da89f8bcde5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.189654] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.189654] env[68437]: value = "task-2945216" [ 1335.189654] env[68437]: _type = "Task" [ 1335.189654] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.197524] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945216, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.303734] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945213, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.352755] env[68437]: DEBUG nova.network.neutron [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updated VIF entry in instance network info cache for port 59123504-f773-43df-9389-0f86ff9b7e52. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1335.353126] env[68437]: DEBUG nova.network.neutron [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updating instance_info_cache with network_info: [{"id": "59123504-f773-43df-9389-0f86ff9b7e52", "address": "fa:16:3e:6a:aa:42", "network": {"id": "37c0e28f-b9ac-482c-a2a6-bee79d99c1a1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1238628437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e76db8ccf4b4e6fa4028ffa815ff3ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59123504-f7", "ovs_interfaceid": "59123504-f773-43df-9389-0f86ff9b7e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.366927] env[68437]: DEBUG nova.scheduler.client.report [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1335.699674] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945216, 'name': CreateVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.803024] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945213, 'name': Destroy_Task} progress is 33%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.856893] env[68437]: DEBUG oslo_concurrency.lockutils [req-2bf23696-2afd-4e00-b1df-ccf4f1833d23 req-824f1eaf-cff4-44c2-8a65-554c891e5d96 service nova] Releasing lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.871888] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.874442] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.140s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.874591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.875767] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1335.875767] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a91f402-1a0a-4ac5-9217-e54c2f8ce029 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.883040] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a3fa8a-8029-466e-8b86-5a3675dc86af {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.897273] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b52803-ccae-4695-b902-17d80142fc41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.900515] env[68437]: INFO nova.scheduler.client.report [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Deleted allocations for instance 353ebb37-7e69-49d4-873e-2272cbfff6e8 [ 1335.906143] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7f1284-734e-4341-bc2f-5ff4f6ba7658 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.939410] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179721MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1335.939576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.939890] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.200218] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945216, 'name': CreateVM_Task, 'duration_secs': 0.604134} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.200473] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.201077] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.201254] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.201610] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1336.201865] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ffb22d0-7afd-40d1-8f84-6c5f568115a5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.206125] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1336.206125] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523340c6-1d25-a552-3b67-d91b5e4af24f" [ 1336.206125] env[68437]: _type = "Task" [ 1336.206125] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.213323] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523340c6-1d25-a552-3b67-d91b5e4af24f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.303523] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945213, 'name': Destroy_Task, 'duration_secs': 1.0802} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.303713] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Destroyed the VM [ 1336.303946] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1336.304198] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6e69bc01-f289-4028-b60a-7b777c253746 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.310287] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1336.310287] env[68437]: value = "task-2945217" [ 1336.310287] env[68437]: _type = "Task" [ 1336.310287] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.318948] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945217, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.416450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-77555965-f32c-4a4c-b6ab-45b2858c86f7 tempest-ServerActionsTestOtherA-1105233648 tempest-ServerActionsTestOtherA-1105233648-project-member] Lock "353ebb37-7e69-49d4-873e-2272cbfff6e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.584s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.716313] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523340c6-1d25-a552-3b67-d91b5e4af24f, 'name': SearchDatastore_Task, 'duration_secs': 0.010896} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.716583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.717193] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.717193] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.717193] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.717380] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.717613] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-913fffd3-f672-4397-a85e-f24c25e07a3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.724776] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.724953] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1336.725702] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e488bd62-f6fb-4c77-9534-799596d64ab3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.730311] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1336.730311] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52333406-e768-6517-b849-049f5abb94c3" [ 1336.730311] env[68437]: _type = "Task" [ 1336.730311] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.737395] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52333406-e768-6517-b849-049f5abb94c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.819363] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945217, 'name': RemoveSnapshot_Task, 'duration_secs': 0.395408} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.819590] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1336.819869] env[68437]: DEBUG nova.compute.manager [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1336.820891] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace86360-6ae1-42e8-bc66-c0870c1771ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.968214] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1336.968389] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 3562f6dc-2596-4878-96f5-1e0da54a168b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1336.968503] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 88191506-b278-4502-b72d-07169f4fd6a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1336.968689] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1336.969240] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1337.015826] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25220840-dbcf-4f43-9f0a-3982b35815ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.023950] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c448b35-b525-4a4a-830e-d78424210ce3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.054673] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76bb0679-ea99-47d4-b6d8-d3405907e704 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.062411] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5edd4ff-e54e-4767-8250-9118a731d257 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.076947] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.240664] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52333406-e768-6517-b849-049f5abb94c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011626} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.241455] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-691f497e-ffca-4a24-bdab-344a9c6cfebb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.246565] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1337.246565] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]529b5ea2-49b9-12ed-5b7f-e6d79c964abc" [ 1337.246565] env[68437]: _type = "Task" [ 1337.246565] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.253836] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b5ea2-49b9-12ed-5b7f-e6d79c964abc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.333125] env[68437]: INFO nova.compute.manager [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Shelve offloading [ 1337.579545] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1337.758026] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]529b5ea2-49b9-12ed-5b7f-e6d79c964abc, 'name': SearchDatastore_Task, 'duration_secs': 0.010276} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.758366] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.758636] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 88191506-b278-4502-b72d-07169f4fd6a6/88191506-b278-4502-b72d-07169f4fd6a6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1337.758904] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74ebc31e-8324-450b-8b48-a5bdc011314d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.765818] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1337.765818] env[68437]: value = "task-2945218" [ 1337.765818] env[68437]: _type = "Task" [ 1337.765818] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.774510] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945218, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.837469] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1337.837739] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d440738-d7d6-4baf-9879-8066b9190f3e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.844436] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1337.844436] env[68437]: value = "task-2945219" [ 1337.844436] env[68437]: _type = "Task" [ 1337.844436] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.852711] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945219, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.087529] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1338.087780] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.148s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.277049] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945218, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486376} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.277049] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 88191506-b278-4502-b72d-07169f4fd6a6/88191506-b278-4502-b72d-07169f4fd6a6.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1338.277049] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1338.277545] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7fc7016-bfb0-45ff-b868-96d20c888685 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.283472] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1338.283472] env[68437]: value = "task-2945220" [ 1338.283472] env[68437]: _type = "Task" [ 1338.283472] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.290589] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945220, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.354600] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1338.356053] env[68437]: DEBUG nova.compute.manager [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1338.356053] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b743a3c-942f-4bc5-be0c-61b6654d4db4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.362156] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.362156] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1338.362275] env[68437]: DEBUG nova.network.neutron [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1338.793505] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945220, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.117287] env[68437]: DEBUG nova.network.neutron [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.293863] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.541953} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.294143] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.294925] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7c6a7d-e01f-43a9-8cbe-7060688fea87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.316175] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 88191506-b278-4502-b72d-07169f4fd6a6/88191506-b278-4502-b72d-07169f4fd6a6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.316423] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34a3a06d-a664-4f5c-b665-3a19d77eb48c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.335131] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1339.335131] env[68437]: value = "task-2945221" [ 1339.335131] env[68437]: _type = "Task" [ 1339.335131] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.342450] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945221, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.619751] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1339.844691] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945221, 'name': ReconfigVM_Task, 'duration_secs': 0.315231} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.845024] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 88191506-b278-4502-b72d-07169f4fd6a6/88191506-b278-4502-b72d-07169f4fd6a6.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1339.845678] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d13b5223-f3e9-4eb1-9a89-8503f83a4596 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.852134] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1339.852134] env[68437]: value = "task-2945222" [ 1339.852134] env[68437]: _type = "Task" [ 1339.852134] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.863218] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945222, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.010903] env[68437]: DEBUG nova.compute.manager [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-vif-unplugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1340.011161] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.011374] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.011542] env[68437]: DEBUG oslo_concurrency.lockutils [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.011767] env[68437]: DEBUG nova.compute.manager [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] No waiting events found dispatching 
network-vif-unplugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1340.011953] env[68437]: WARNING nova.compute.manager [req-c2ace972-dc27-489f-98a7-6a966283fe8d req-10180383-0399-4eac-afe2-fe4481c72313 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received unexpected event network-vif-unplugged-d3799e51-78a6-4580-a8ae-68366989843d for instance with vm_state shelved and task_state shelving_offloading. [ 1340.160840] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1340.161875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2bc40a-694c-411c-86f4-33f0499f17f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.169570] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1340.170121] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b385209-7249-4afe-9854-a7b0e11b04d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.235229] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1340.235446] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1340.235633] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1340.236149] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-187b1206-1736-47b8-84e2-0899e07aa5b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.242562] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1340.242562] env[68437]: value = "task-2945224" [ 1340.242562] env[68437]: _type = "Task" [ 1340.242562] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.250562] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.362797] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945222, 'name': Rename_Task, 'duration_secs': 0.15426} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.363234] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1340.363583] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa947783-dde6-400e-a8ae-c3e1b48a9b36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.369776] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1340.369776] env[68437]: value = "task-2945225" [ 1340.369776] env[68437]: _type = "Task" [ 1340.369776] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.378544] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.751916] env[68437]: DEBUG oslo_vmware.api [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180214} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.752713] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1340.752983] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1340.753118] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1340.780396] env[68437]: INFO nova.scheduler.client.report [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted allocations for instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 [ 1340.880137] env[68437]: DEBUG oslo_vmware.api [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945225, 'name': PowerOnVM_Task, 'duration_secs': 0.479193} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.880528] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.880887] env[68437]: INFO nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Took 7.70 seconds to spawn the instance on the hypervisor. 
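The entries above show the compute driver submitting vCenter tasks (ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) and then blocking in wait_for_task, which repeatedly polls the task and logs its progress percentage until it reports success. The following is a minimal, self-contained Python sketch of that poll-until-complete pattern; it is illustrative only and is not the oslo.vmware implementation — TaskInfo, poll() and wait_for_task() here are hypothetical names.

    # Illustrative sketch of the wait_for_task/_poll_task loop seen above.
    # Not the oslo.vmware implementation; TaskInfo, poll() and wait_for_task()
    # are hypothetical names used only for this example.
    import time
    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class TaskInfo:
        task_id: str
        state: str            # "running", "success" or "error"
        progress: int = 0     # percent complete
        error: str | None = None

    def wait_for_task(poll: Callable[[], TaskInfo], interval: float = 0.5) -> TaskInfo:
        """Poll a task until it finishes, logging progress like the entries above."""
        while True:
            info = poll()
            if info.state == "success":
                print(f"Task {info.task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
            print(f"Task {info.task_id} progress is {info.progress}%.")
            time.sleep(interval)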
[ 1340.882419] env[68437]: DEBUG nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1340.882419] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b397dd4a-4ac9-4277-a189-78ba8d5a3209 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.284539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.284838] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.285073] env[68437]: DEBUG nova.objects.instance [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'resources' on Instance uuid 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.406062] env[68437]: INFO nova.compute.manager [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Took 12.42 seconds to build instance. 
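Throughout this section oslo.concurrency's lockutils logs the full lock lifecycle around critical sections such as "compute_resources" and the refresh_cache-&lt;uuid&gt; locks: "Acquiring lock X by Y", "acquired ... waited N s", and "released ... held N s". A minimal sketch of that acquire/wait/hold/release timing pattern, assuming a plain threading.Lock (this is not the lockutils code itself):

    # Minimal sketch of the acquire/wait/hold/release logging pattern seen in
    # the lockutils entries. Assumes a plain threading.Lock; not the
    # oslo.concurrency implementation.
    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str, owner: str):
        print(f'Acquiring lock "{name}" by "{owner}"')
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage example (hypothetical owner string, mirroring the log format):
    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources",
                    "ResourceTracker.update_usage"):
        pass  # critical section, e.g. updating tracked resource usage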
[ 1341.788755] env[68437]: DEBUG nova.objects.instance [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'numa_topology' on Instance uuid 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.908590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96db23e8-0a06-4071-b41d-8d1d86dbe0c1 tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.932s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.041520] env[68437]: DEBUG nova.compute.manager [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-changed-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1342.041755] env[68437]: DEBUG nova.compute.manager [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing instance network info cache due to event network-changed-d3799e51-78a6-4580-a8ae-68366989843d. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1342.042012] env[68437]: DEBUG oslo_concurrency.lockutils [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.042198] env[68437]: DEBUG oslo_concurrency.lockutils [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.042428] env[68437]: DEBUG nova.network.neutron [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing network info cache for port d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1342.293334] env[68437]: DEBUG nova.objects.base [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Object Instance<6d4f617c-97d6-4721-97c0-7a6b8676d681> lazy-loaded attributes: resources,numa_topology {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1342.348518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0a8de4-2ca7-4b86-b126-3a51af320b6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.356452] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed65e1d-55f3-4752-9777-3099edbfb581 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.388013] 
env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767ceb51-623a-42f0-9f1b-fbdde0982f52 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.396026] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c6df50-7fc0-40a0-8329-f74d1bb8b128 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.409231] env[68437]: DEBUG nova.compute.provider_tree [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.485210] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.485578] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.698521] env[68437]: DEBUG nova.compute.manager [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Received event network-changed-59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1342.698707] env[68437]: DEBUG nova.compute.manager [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Refreshing instance network info cache due to event network-changed-59123504-f773-43df-9389-0f86ff9b7e52. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1342.698919] env[68437]: DEBUG oslo_concurrency.lockutils [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] Acquiring lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.699387] env[68437]: DEBUG oslo_concurrency.lockutils [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] Acquired lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.699565] env[68437]: DEBUG nova.network.neutron [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Refreshing network info cache for port 59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1342.780588] env[68437]: DEBUG nova.network.neutron [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updated VIF entry in instance network info cache for port d3799e51-78a6-4580-a8ae-68366989843d. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1342.780922] env[68437]: DEBUG nova.network.neutron [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd3799e51-78", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.913903] env[68437]: DEBUG nova.scheduler.client.report [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1342.987642] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.148924] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.283507] env[68437]: DEBUG oslo_concurrency.lockutils [req-adff1c16-4a23-4aa0-9d7b-319f025c7f51 req-fea10c93-f98f-4b2b-8247-2a2932af9a4c service nova] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.409061] env[68437]: DEBUG nova.network.neutron [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updated VIF entry in instance network info cache for port 59123504-f773-43df-9389-0f86ff9b7e52. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1343.409471] env[68437]: DEBUG nova.network.neutron [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updating instance_info_cache with network_info: [{"id": "59123504-f773-43df-9389-0f86ff9b7e52", "address": "fa:16:3e:6a:aa:42", "network": {"id": "37c0e28f-b9ac-482c-a2a6-bee79d99c1a1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1238628437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5e76db8ccf4b4e6fa4028ffa815ff3ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8edfde4-5a99-4745-956d-04da82ab1b85", "external-id": "nsx-vlan-transportzone-519", "segmentation_id": 519, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59123504-f7", "ovs_interfaceid": "59123504-f773-43df-9389-0f86ff9b7e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.420772] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.136s {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.513491] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.513783] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.515418] env[68437]: INFO nova.compute.claims [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1343.913767] env[68437]: DEBUG oslo_concurrency.lockutils [req-8f84d3d6-3c2e-4e62-a141-7a5cbe780698 req-86da0b69-2d2c-44cc-b825-70ecc30d324e service nova] Releasing lock "refresh_cache-88191506-b278-4502-b72d-07169f4fd6a6" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.929745] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25ccb7c8-d644-454b-ba52-73ac397c9d88 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.328s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.930583] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.782s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.930772] env[68437]: INFO nova.compute.manager [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Unshelving [ 1344.582568] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3788a8-f365-4603-a0ad-071894964419 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.591702] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428a6fcb-cd7b-4989-8451-0c8911378c28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.621315] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5e6f90-39f5-4165-82e0-f034ae0cd45a {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.628977] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed3345b-d2e2-4e1f-88a1-a618ecdd0aae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.641955] env[68437]: DEBUG nova.compute.provider_tree [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.951468] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.145651] env[68437]: DEBUG nova.scheduler.client.report [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1345.650761] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.137s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.651322] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1345.654247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.703s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.654490] env[68437]: DEBUG nova.objects.instance [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'pci_requests' on Instance uuid 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1346.157404] env[68437]: DEBUG nova.compute.utils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1346.160318] env[68437]: DEBUG nova.objects.instance [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'numa_topology' on Instance uuid 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1346.161582] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1346.161801] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1346.209386] env[68437]: DEBUG nova.policy [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c0ad6b72826461182b13891a59ad5ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a360795f512644e7ae10b78395d028df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1346.452832] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Successfully created port: 34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.662972] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1346.665686] env[68437]: INFO nova.compute.claims [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1347.674783] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1347.705832] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1347.706642] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.706642] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1347.706642] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.706642] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1347.706642] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1347.706897] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1347.706897] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1347.707037] env[68437]: DEBUG 
nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1347.707194] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1347.707365] env[68437]: DEBUG nova.virt.hardware [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1347.708268] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7db398-9796-42bf-99f0-13e1096a88e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.719246] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4830fb-50c1-467d-bb66-2d924342a0db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.747062] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1aa7ef-21b6-46fd-8855-d4a3af84940f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.753872] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dce0340-6737-42da-80f9-ce8eb5633ffe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.782961] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6246c77a-a443-4d33-9925-d85df0265520 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.790607] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b2ab89-398a-4bce-9d1d-a27a558ffd76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.805093] env[68437]: DEBUG nova.compute.provider_tree [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.808195] env[68437]: DEBUG nova.compute.manager [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Received event network-vif-plugged-34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1347.808422] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] Acquiring 
lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.808633] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.808803] env[68437]: DEBUG oslo_concurrency.lockutils [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.808983] env[68437]: DEBUG nova.compute.manager [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] No waiting events found dispatching network-vif-plugged-34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1347.809179] env[68437]: WARNING nova.compute.manager [req-bf5c98b3-0603-463d-8bc8-ded11f1da096 req-48ca3c09-bb85-4b32-a531-488d81d523a9 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Received unexpected event network-vif-plugged-34d144a1-de22-4f6c-96d8-d66a24f9f98e for instance with vm_state building and task_state spawning. [ 1347.886493] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Successfully updated port: 34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1348.311339] env[68437]: DEBUG nova.scheduler.client.report [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1348.389375] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.389539] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquired lock 
"refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.389630] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1348.816727] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.162s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.845055] env[68437]: INFO nova.network.neutron [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating port d3799e51-78a6-4580-a8ae-68366989843d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1348.934855] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1349.122500] env[68437]: DEBUG nova.network.neutron [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Updating instance_info_cache with network_info: [{"id": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "address": "fa:16:3e:73:93:f9", "network": {"id": "56f65d23-c443-48fa-b11d-eb0eb7d3ccb7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-895260781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a360795f512644e7ae10b78395d028df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d144a1-de", "ovs_interfaceid": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.625653] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Releasing lock 
"refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1349.625980] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Instance network_info: |[{"id": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "address": "fa:16:3e:73:93:f9", "network": {"id": "56f65d23-c443-48fa-b11d-eb0eb7d3ccb7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-895260781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a360795f512644e7ae10b78395d028df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d144a1-de", "ovs_interfaceid": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1349.626421] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:93:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b6a4065-12af-4fb9-ac47-ec9143f7297e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34d144a1-de22-4f6c-96d8-d66a24f9f98e', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1349.634355] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Creating folder: Project (a360795f512644e7ae10b78395d028df). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1349.634645] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9520be74-5bce-412c-aec0-9a37df782198 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.646166] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Created folder: Project (a360795f512644e7ae10b78395d028df) in parent group-v590848. [ 1349.646341] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Creating folder: Instances. Parent ref: group-v591161. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1349.646549] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c67dd869-a4be-473a-90b6-cb7e98181c06 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.654749] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Created folder: Instances in parent group-v591161. [ 1349.654961] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1349.655150] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1349.655328] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c17344f3-ead8-4566-8f6e-e63f9f8f899b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.674483] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1349.674483] env[68437]: value = "task-2945228" [ 1349.674483] env[68437]: _type = "Task" [ 1349.674483] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.681537] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945228, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.833229] env[68437]: DEBUG nova.compute.manager [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Received event network-changed-34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1349.833468] env[68437]: DEBUG nova.compute.manager [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Refreshing instance network info cache due to event network-changed-34d144a1-de22-4f6c-96d8-d66a24f9f98e. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1349.833619] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] Acquiring lock "refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.833765] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] Acquired lock "refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1349.834008] env[68437]: DEBUG nova.network.neutron [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Refreshing network info cache for port 34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1350.184426] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945228, 'name': CreateVM_Task, 'duration_secs': 0.373995} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.184623] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1350.185318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.185484] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.185808] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1350.186067] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9fa46bc-815e-4535-831f-e02996555876 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.190626] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1350.190626] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c459a4-598b-eade-d66e-89be9175d502" [ 1350.190626] env[68437]: _type = "Task" [ 1350.190626] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.198335] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c459a4-598b-eade-d66e-89be9175d502, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.406316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.406316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.406316] env[68437]: DEBUG nova.network.neutron [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1350.538605] env[68437]: DEBUG nova.network.neutron [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Updated VIF entry in instance network info cache for port 34d144a1-de22-4f6c-96d8-d66a24f9f98e. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1350.539047] env[68437]: DEBUG nova.network.neutron [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Updating instance_info_cache with network_info: [{"id": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "address": "fa:16:3e:73:93:f9", "network": {"id": "56f65d23-c443-48fa-b11d-eb0eb7d3ccb7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-895260781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a360795f512644e7ae10b78395d028df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d144a1-de", "ovs_interfaceid": "34d144a1-de22-4f6c-96d8-d66a24f9f98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.701889] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c459a4-598b-eade-d66e-89be9175d502, 'name': SearchDatastore_Task, 'duration_secs': 0.01183} completed successfully. 
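Note: the instance_info_cache update above carries the full Neutron VIF model as a nested dict. A minimal sketch of walking that structure to recover the fixed and floating addresses; the literal below is trimmed to the keys visible in the log line, and addresses() is an illustrative helper, not a Nova function.

# Trimmed VIF entry with only the keys used here; real cache entries
# carry the full Neutron metadata shown in the log line above.
vif = {
    "id": "34d144a1-de22-4f6c-96d8-d66a24f9f98e",
    "address": "fa:16:3e:73:93:f9",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.9", "floating_ips": []}],
        }],
    },
}

def addresses(vif):
    # Walk network -> subnets -> ips, yielding (fixed, [floating, ...]).
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            yield ip["address"], [f["address"] for f in ip["floating_ips"]]

print(list(addresses(vif)))  # [('192.168.128.9', [])]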
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.702142] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.702368] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1350.702601] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.702749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.702929] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.703195] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74b341a2-2d99-476c-8fd9-8aa63dd5b913 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.710585] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.710778] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Folder [datastore2] devstack-image-cache_base created. 
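Note: the "Acquiring lock / Acquired lock / Releasing lock" entries around the image-cache path come from oslo.concurrency. A minimal sketch of that pattern, assuming only that the lock name mirrors the datastore path in the log; the body is a placeholder for the actual cache-check and disk-copy work.

from oslo_concurrency import lockutils

# Lock name mirroring the cached-image VMDK path seen in the log.
cache_vmdk = ('[datastore2] devstack-image-cache_base/'
              'a272f526-6b8d-4a29-bd06-cd29ab5fabbe/'
              'a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk')

# lockutils.lock() emits the "Acquiring lock"/"Acquired lock"/
# "Releasing lock" debug lines above; it serializes concurrent spawns
# so the same cached VMDK is not fetched or copied twice.
with lockutils.lock(cache_vmdk):
    pass  # placeholder: check the cache, copy the virtual disk, etc.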
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1350.711479] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c593d64-0f91-4595-83f3-3299e00663eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.716458] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1350.716458] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52582d54-4dbe-4296-4bb2-c403abd6e331" [ 1350.716458] env[68437]: _type = "Task" [ 1350.716458] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.723434] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52582d54-4dbe-4296-4bb2-c403abd6e331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.041439] env[68437]: DEBUG oslo_concurrency.lockutils [req-e3262674-67e3-43ee-94d2-44643f590ee3 req-c88ad3f0-7f6d-4911-b38f-c6b3f5a9758e service nova] Releasing lock "refresh_cache-b85083c1-6b10-4bd4-8bf4-a23e961863f0" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.101351] env[68437]: DEBUG nova.network.neutron [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.226300] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': 
session[52d18e23-934d-b881-b17b-a9e1dee55268]52582d54-4dbe-4296-4bb2-c403abd6e331, 'name': SearchDatastore_Task, 'duration_secs': 0.009085} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.227099] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-489c623c-f825-4dad-828f-aa39dcaf1bf5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.232517] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1351.232517] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef088c-736b-a7c0-b449-01b686032e19" [ 1351.232517] env[68437]: _type = "Task" [ 1351.232517] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.239936] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef088c-736b-a7c0-b449-01b686032e19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.603988] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.627343] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='380726df1cb2659a4a416213f3c20339',container_format='bare',created_at=2025-03-11T18:48:50Z,direct_url=,disk_format='vmdk',id=706b952b-a1ff-4ca2-a65f-b55624309a9e,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1242808668-shelved',owner='0e28f7fd8c8d412f8c9e1624c55d6604',properties=ImageMetaProps,protected=,size=31661568,status='active',tags=,updated_at=2025-03-11T18:49:06Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1351.627592] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1351.627753] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image limits 
0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1351.627938] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1351.628097] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1351.628250] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1351.628459] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1351.628616] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1351.628782] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1351.628942] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1351.629133] env[68437]: DEBUG nova.virt.hardware [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1351.630280] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7afcc5-2dcd-46dd-bfda-ee4b19b4c1ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.638403] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aba033c-7849-4c9c-8b91-f4a81280c730 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.651205] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:2c:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3799e51-78a6-4580-a8ae-68366989843d', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1351.658379] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1351.658605] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1351.658804] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c75e9ca-6415-4e59-95a4-a39369dd7bc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.676621] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1351.676621] env[68437]: value = "task-2945229" [ 1351.676621] env[68437]: _type = "Task" [ 1351.676621] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.683611] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945229, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.742768] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52ef088c-736b-a7c0-b449-01b686032e19, 'name': SearchDatastore_Task, 'duration_secs': 0.010672} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.743103] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1351.743403] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] b85083c1-6b10-4bd4-8bf4-a23e961863f0/b85083c1-6b10-4bd4-8bf4-a23e961863f0.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1351.743699] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cdc7704-1ee8-49f0-a695-0936d51f40e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.749503] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1351.749503] env[68437]: value = "task-2945230" [ 1351.749503] env[68437]: _type = "Task" [ 1351.749503] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.757178] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945230, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.862834] env[68437]: DEBUG nova.compute.manager [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1351.863023] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1351.863108] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1351.863340] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.863520] env[68437]: DEBUG nova.compute.manager [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] No waiting events found dispatching network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1351.863661] env[68437]: WARNING nova.compute.manager [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received unexpected event network-vif-plugged-d3799e51-78a6-4580-a8ae-68366989843d for instance with vm_state shelved_offloaded and task_state spawning. [ 1351.863840] env[68437]: DEBUG nova.compute.manager [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-changed-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1351.864020] env[68437]: DEBUG nova.compute.manager [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing instance network info cache due to event network-changed-d3799e51-78a6-4580-a8ae-68366989843d. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1351.864221] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Acquiring lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.864364] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Acquired lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.864538] env[68437]: DEBUG nova.network.neutron [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Refreshing network info cache for port d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1352.193060] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945229, 'name': CreateVM_Task, 'duration_secs': 0.389939} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.193060] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1352.193779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.194092] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.194556] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1352.194965] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa9fc6a4-b90d-4679-926c-bcf09b6f112e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.200485] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1352.200485] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52867bfd-5671-4624-ac25-f915ac51eac4" [ 1352.200485] env[68437]: _type = "Task" [ 1352.200485] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.209312] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52867bfd-5671-4624-ac25-f915ac51eac4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.258384] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480245} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.258612] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore2] b85083c1-6b10-4bd4-8bf4-a23e961863f0/b85083c1-6b10-4bd4-8bf4-a23e961863f0.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1352.258825] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1352.259076] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-973cfb0b-e1b7-43c7-b3f9-2393aa5b5d78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.265297] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1352.265297] env[68437]: value = "task-2945231" [ 1352.265297] env[68437]: _type = "Task" [ 1352.265297] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.273345] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945231, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.553416] env[68437]: DEBUG nova.network.neutron [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updated VIF entry in instance network info cache for port d3799e51-78a6-4580-a8ae-68366989843d. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1352.553785] env[68437]: DEBUG nova.network.neutron [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [{"id": "d3799e51-78a6-4580-a8ae-68366989843d", "address": "fa:16:3e:94:2c:29", "network": {"id": "a6815e19-5a69-40c3-961a-2d38d7374d3e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-922241912-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e28f7fd8c8d412f8c9e1624c55d6604", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3799e51-78", "ovs_interfaceid": "d3799e51-78a6-4580-a8ae-68366989843d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.710906] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.711180] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Processing image 706b952b-a1ff-4ca2-a65f-b55624309a9e {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.711412] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.711561] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquired lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.711742] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 
tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.711986] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5f1cb3c-5fb6-4a5d-a80f-1325a97541cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.723830] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.723993] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.724723] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-736237c1-a991-44a1-8142-bbf53f105d4c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.729393] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1352.729393] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e19a79-2c5a-67f2-3346-4e22dadafda5" [ 1352.729393] env[68437]: _type = "Task" [ 1352.729393] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.736535] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e19a79-2c5a-67f2-3346-4e22dadafda5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.773894] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945231, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069224} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.774255] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1352.775116] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d13f29-34f7-4eaf-840b-5e001532efb8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.796142] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] b85083c1-6b10-4bd4-8bf4-a23e961863f0/b85083c1-6b10-4bd4-8bf4-a23e961863f0.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1352.796438] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-912af441-0290-4f87-ac46-42ef203ae273 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.814463] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1352.814463] env[68437]: value = "task-2945232" [ 1352.814463] env[68437]: _type = "Task" [ 1352.814463] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.822060] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945232, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.056937] env[68437]: DEBUG oslo_concurrency.lockutils [req-43f52910-2f03-45f7-ad30-40fb307a7a82 req-229cdb05-62d6-4bd5-a465-2bb4227db89e service nova] Releasing lock "refresh_cache-6d4f617c-97d6-4721-97c0-7a6b8676d681" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.239743] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1353.240077] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Fetch image to [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f/OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1353.240315] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Downloading stream optimized image 706b952b-a1ff-4ca2-a65f-b55624309a9e to [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f/OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f.vmdk on the data store datastore2 as vApp {{(pid=68437) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1353.240492] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Downloading image file data 706b952b-a1ff-4ca2-a65f-b55624309a9e to the ESX as VM named 'OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f' {{(pid=68437) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1353.304285] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1353.304285] env[68437]: value = "resgroup-9" [ 1353.304285] env[68437]: _type = "ResourcePool" [ 1353.304285] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1353.304629] env[68437]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f1448db1-3499-4f23-ad84-633af778fb67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.327685] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945232, 'name': ReconfigVM_Task, 'duration_secs': 0.309045} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.328838] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Reconfigured VM instance instance-00000078 to attach disk [datastore2] b85083c1-6b10-4bd4-8bf4-a23e961863f0/b85083c1-6b10-4bd4-8bf4-a23e961863f0.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1353.329526] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease: (returnval){ [ 1353.329526] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1353.329526] env[68437]: _type = "HttpNfcLease" [ 1353.329526] env[68437]: } obtained for vApp import into resource pool (val){ [ 1353.329526] env[68437]: value = "resgroup-9" [ 1353.329526] env[68437]: _type = "ResourcePool" [ 1353.329526] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1353.329769] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the lease: (returnval){ [ 1353.329769] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1353.329769] env[68437]: _type = "HttpNfcLease" [ 1353.329769] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1353.329917] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5d1fac7-f290-4855-bf18-51866eb3c0f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.337268] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1353.337268] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1353.337268] env[68437]: _type = "HttpNfcLease" [ 1353.337268] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1353.339323] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1353.339323] env[68437]: value = "task-2945234" [ 1353.339323] env[68437]: _type = "Task" [ 1353.339323] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.351652] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945234, 'name': Rename_Task} progress is 6%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.840304] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1353.840304] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1353.840304] env[68437]: _type = "HttpNfcLease" [ 1353.840304] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1353.850873] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945234, 'name': Rename_Task, 'duration_secs': 0.175771} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.851253] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1353.851552] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4dc7b5c3-3167-426e-a1fb-519fb83144dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.858162] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1353.858162] env[68437]: value = "task-2945235" [ 1353.858162] env[68437]: _type = "Task" [ 1353.858162] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.867742] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.340953] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1354.340953] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1354.340953] env[68437]: _type = "HttpNfcLease" [ 1354.340953] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1354.341329] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1354.341329] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52943774-c7a9-bcce-837c-58ed089c2835" [ 1354.341329] env[68437]: _type = "HttpNfcLease" [ 1354.341329] env[68437]: }. 
{{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1354.341965] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a859cb5-6b98-4b42-87b6-bf74bdbd1f0c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.349353] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1354.349532] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating HTTP connection to write to file with size = 31661568 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk. {{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1354.418745] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8644c656-e9bc-49a3-a53e-f8eb68236e6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.420739] env[68437]: DEBUG oslo_vmware.api [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945235, 'name': PowerOnVM_Task, 'duration_secs': 0.502605} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.423052] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1354.423052] env[68437]: INFO nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Took 6.75 seconds to spawn the instance on the hypervisor. 
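Note: the CreateVM_Task / "Waiting for the task" / "completed successfully" sequence above is the generic oslo.vmware invoke-and-poll pattern. A minimal sketch, assuming the folder and resource pool morefs and a finished VirtualMachineConfigSpec are already in hand; the endpoint and credentials are placeholders, and the exact VMwareAPISession constructor arguments should be checked against the installed oslo.vmware.

from oslo_vmware import api

# Placeholder endpoint and credentials.
session = api.VMwareAPISession('vc1.example.test', 'svc-nova', 's3cret',
                               api_retry_count=10, task_poll_interval=0.5)

def create_vm(folder_ref, respool_ref, config_spec):
    # Folder.CreateVM_Task returns a Task moref; wait_for_task() polls it
    # (the "progress is 0%" ... "completed successfully" lines) and
    # returns the TaskInfo, whose result is the new VM moref.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=respool_ref)
    return session.wait_for_task(task).result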
[ 1354.423307] env[68437]: DEBUG nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1354.424698] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8a34e9-2002-4e10-acf6-5f9f8761b89c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.948781] env[68437]: INFO nova.compute.manager [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Took 11.45 seconds to build instance. [ 1355.450284] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8040bd77-a8d1-4724-9d03-a7604b2dd96b tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.965s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.502667] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1355.503025] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1355.504148] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068ae932-8294-4c38-a6b1-5864d0851f86 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.512666] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1355.512865] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1355.513173] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-fca399af-0a90-4768-ac02-9ec480118eda {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.609867] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.610258] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.610602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.610926] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1355.611175] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1355.613624] env[68437]: INFO nova.compute.manager [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Terminating instance [ 1355.977392] env[68437]: DEBUG oslo_vmware.rw_handles [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d1865f-5df5-19dc-1d07-43504b2ec39f/disk-0.vmdk. 
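Note: the shelved-image download above runs through an HttpNfcLease: ImportVApp returns the lease, the lease info exposes the disk-0.vmdk device URL, the image bytes are streamed to that URL, and the lease is completed. A minimal sketch of that flow with oslo.vmware primitives; import_spec and read_chunks() are assumed to be a prepared VirtualMachineImportSpec and an image-byte iterator, and real code also ticks HttpNfcLeaseProgress from a keep-alive loop while the transfer runs.

import requests
from oslo_vmware import vim_util

def import_stream_optimized(session, respool_ref, folder_ref,
                            import_spec, read_chunks):
    # ResourcePool.ImportVApp hands back an HttpNfcLease for the vApp.
    lease = session.invoke_api(session.vim, 'ImportVApp', respool_ref,
                               spec=import_spec, folder=folder_ref)
    session.wait_for_lease_ready(lease)

    # The lease info carries the .../nfc/<id>/disk-0.vmdk write URL.
    info = session.invoke_api(vim_util, 'get_object_property',
                              session.vim, lease, 'info')
    url = info.deviceUrl[0].url

    # One chunked PUT of the image bytes, then mark the lease done.
    requests.put(url, data=read_chunks(), verify=False)
    session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease,
                       percent=100)
    session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)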
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1355.977661] env[68437]: INFO nova.virt.vmwareapi.images [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Downloaded image file data 706b952b-a1ff-4ca2-a65f-b55624309a9e [ 1355.978463] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5d763b-2b80-4766-8f12-97fcd18980e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.993824] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-723f2c76-3292-4bc2-beb9-03e588f9ca69 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.118422] env[68437]: DEBUG nova.compute.manager [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1356.118655] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1356.119575] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6f43c2-9be1-4d2f-ac50-5bed05557ebf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.127310] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1356.127538] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-906a518d-9fdb-4e7a-989a-611adc207896 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.133821] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1356.133821] env[68437]: value = "task-2945237" [ 1356.133821] env[68437]: _type = "Task" [ 1356.133821] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.143255] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.152594] env[68437]: INFO nova.virt.vmwareapi.images [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] The imported VM was unregistered [ 1356.155222] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1356.155462] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Creating directory with path [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1356.155744] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4330ad0-a788-4f29-87ee-6b78bb583604 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.167651] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Created directory with path [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1356.167845] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f/OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f.vmdk to [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk. {{(pid=68437) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1356.168111] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-03909026-ec22-4e22-9734-819ed9c1bb42 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.174188] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1356.174188] env[68437]: value = "task-2945238" [ 1356.174188] env[68437]: _type = "Task" [ 1356.174188] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.181585] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.647064] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945237, 'name': PowerOffVM_Task, 'duration_secs': 0.222168} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.647064] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1356.647503] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1356.647503] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c46c2041-18e2-4a88-a546-e53eb7fec428 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.684017] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.754518] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1356.754844] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1356.755150] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Deleting the datastore file [datastore2] b85083c1-6b10-4bd4-8bf4-a23e961863f0 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1356.755451] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c9d7a49-a32e-45c2-9e9e-4d1a993698b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.766802] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for the task: (returnval){ [ 1356.766802] env[68437]: value = "task-2945240" [ 
1356.766802] env[68437]: _type = "Task" [ 1356.766802] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.781110] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.187115] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.279140] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.688050] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.780364] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.189652] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.281079] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.688101] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945238, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.291026} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.688446] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f/OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f.vmdk to [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk. [ 1358.688446] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Cleaning up location [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1358.688626] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7e9d4dc5-8ff9-41cd-8f8a-2eebc3b83c3f {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1358.688849] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7577202-9d71-4eb9-bfda-dc92287b4990 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.694825] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1358.694825] env[68437]: value = "task-2945241" [ 1358.694825] env[68437]: _type = "Task" [ 1358.694825] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.701880] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.777416] env[68437]: DEBUG oslo_vmware.api [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Task: {'id': task-2945240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.644817} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.777673] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1358.777844] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1358.778033] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1358.778214] env[68437]: INFO nova.compute.manager [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Took 2.66 seconds to destroy the instance on the hypervisor. [ 1358.778450] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1358.778640] env[68437]: DEBUG nova.compute.manager [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1358.778735] env[68437]: DEBUG nova.network.neutron [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1359.027275] env[68437]: DEBUG nova.compute.manager [req-e914cab0-74ea-4d7f-a22c-8e3acab08844 req-6530db6d-0c2a-43c7-8e8f-9e6f9360edc2 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Received event network-vif-deleted-34d144a1-de22-4f6c-96d8-d66a24f9f98e {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1359.028503] env[68437]: INFO nova.compute.manager [req-e914cab0-74ea-4d7f-a22c-8e3acab08844 req-6530db6d-0c2a-43c7-8e8f-9e6f9360edc2 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Neutron deleted interface 34d144a1-de22-4f6c-96d8-d66a24f9f98e; detaching it from the instance and deleting it from the info cache [ 1359.028503] env[68437]: DEBUG nova.network.neutron [req-e914cab0-74ea-4d7f-a22c-8e3acab08844 req-6530db6d-0c2a-43c7-8e8f-9e6f9360edc2 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.205866] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.054041} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.206125] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.206294] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Releasing lock "[datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.206537] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk to [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1359.206781] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e212eec-85a4-467c-93c4-6d9f326b0fc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.214442] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1359.214442] env[68437]: value = "task-2945242" [ 1359.214442] env[68437]: _type = "Task" [ 1359.214442] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.221773] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.506619] env[68437]: DEBUG nova.network.neutron [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1359.530145] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3058713d-dd42-49c1-b505-6da47b7edb47 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.544452] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84dede46-9235-4e1c-aae6-55216d5a5ec8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.575718] env[68437]: DEBUG nova.compute.manager [req-e914cab0-74ea-4d7f-a22c-8e3acab08844 req-6530db6d-0c2a-43c7-8e8f-9e6f9360edc2 service nova] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Detach interface failed, port_id=34d144a1-de22-4f6c-96d8-d66a24f9f98e, reason: Instance b85083c1-6b10-4bd4-8bf4-a23e961863f0 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1359.727881] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.009651] env[68437]: INFO nova.compute.manager [-] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Took 1.23 seconds to deallocate network for instance. [ 1360.228392] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.516770] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.518224] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.518901] env[68437]: DEBUG nova.objects.instance [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lazy-loading 'resources' on Instance uuid b85083c1-6b10-4bd4-8bf4-a23e961863f0 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.730580] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.093238] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f9cc9c-5351-4963-add9-487ef8a573a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.105305] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ac2b7d-8396-4ebb-aaba-4d658cacd41d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.141503] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a8cbba-54e6-498f-942b-48384a05765d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.151164] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7866ae77-bf59-468f-b273-f1c01808775b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.168998] env[68437]: DEBUG nova.compute.provider_tree [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.230053] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.672834] env[68437]: DEBUG nova.scheduler.client.report [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1361.727328] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945242, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.126839} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.727558] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/706b952b-a1ff-4ca2-a65f-b55624309a9e/706b952b-a1ff-4ca2-a65f-b55624309a9e.vmdk to [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1361.728339] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72a5cdb-b011-44eb-b80d-9313231dbfdc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.750344] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1361.750344] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d09a54bb-d232-4d50-aee0-83fd6bb3a37e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.767666] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1361.767666] env[68437]: value = "task-2945243" [ 1361.767666] env[68437]: _type = "Task" [ 1361.767666] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.774967] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945243, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.177599] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.196832] env[68437]: INFO nova.scheduler.client.report [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Deleted allocations for instance b85083c1-6b10-4bd4-8bf4-a23e961863f0 [ 1362.277305] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945243, 'name': ReconfigVM_Task, 'duration_secs': 0.276478} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.277575] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681/6d4f617c-97d6-4721-97c0-7a6b8676d681.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1362.278202] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eeea466a-7ca7-4d12-8802-bb0c0b01d8bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.284691] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1362.284691] env[68437]: value = "task-2945244" [ 1362.284691] env[68437]: _type = "Task" [ 1362.284691] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.293100] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945244, 'name': Rename_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.704462] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c803fb62-87ec-4461-8120-ec2734f9cec0 tempest-ServerAddressesTestJSON-2135443341 tempest-ServerAddressesTestJSON-2135443341-project-member] Lock "b85083c1-6b10-4bd4-8bf4-a23e961863f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.094s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1362.795772] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945244, 'name': Rename_Task, 'duration_secs': 0.142228} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.796140] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1362.796330] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bea16ee-f546-4a2c-9cd1-5bb48adf91bc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.802429] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1362.802429] env[68437]: value = "task-2945245" [ 1362.802429] env[68437]: _type = "Task" [ 1362.802429] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.809714] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945245, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.312713] env[68437]: DEBUG oslo_vmware.api [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945245, 'name': PowerOnVM_Task, 'duration_secs': 0.488986} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.313067] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1363.418898] env[68437]: DEBUG nova.compute.manager [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1363.419832] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb49be8-215d-4010-bedd-7ad4c561661f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.939111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2cb5d158-3061-4ccc-93f8-68919770e6e5 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.008s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1367.936955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "3562f6dc-2596-4878-96f5-1e0da54a168b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.936955] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.936955] env[68437]: DEBUG nova.compute.manager [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1367.937950] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04459f5-23ee-418b-90c4-68b43e5d1f3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.945165] env[68437]: DEBUG nova.compute.manager [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1367.945744] 
env[68437]: DEBUG nova.objects.instance [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'flavor' on Instance uuid 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1368.953801] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1368.954337] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-188cebb1-2da5-46f0-b523-0f4301af05c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.962079] env[68437]: DEBUG oslo_vmware.api [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1368.962079] env[68437]: value = "task-2945246" [ 1368.962079] env[68437]: _type = "Task" [ 1368.962079] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.970120] env[68437]: DEBUG oslo_vmware.api [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.472241] env[68437]: DEBUG oslo_vmware.api [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945246, 'name': PowerOffVM_Task, 'duration_secs': 0.161203} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.472524] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1369.472711] env[68437]: DEBUG nova.compute.manager [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1369.473482] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24798b4-88df-4dfd-a06c-adc0514a841a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.985557] env[68437]: DEBUG oslo_concurrency.lockutils [None req-25d5db01-f932-42c1-86ce-cdd4be13249e tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1370.530061] env[68437]: DEBUG nova.objects.instance [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'flavor' on Instance uuid 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.036068] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.036068] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1371.036068] env[68437]: DEBUG nova.network.neutron [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1371.036068] env[68437]: DEBUG nova.objects.instance [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'info_cache' on Instance uuid 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1371.539141] env[68437]: DEBUG nova.objects.base [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 
tempest-ServerActionsTestJSON-1568522030-project-member] Object Instance<3562f6dc-2596-4878-96f5-1e0da54a168b> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1372.230368] env[68437]: DEBUG nova.network.neutron [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.733069] env[68437]: DEBUG oslo_concurrency.lockutils [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.739467] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1373.739871] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f51fe5d9-c4ee-4eb0-8555-18f4e3fb7302 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.748542] env[68437]: DEBUG oslo_vmware.api [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1373.748542] env[68437]: value = "task-2945247" [ 1373.748542] env[68437]: _type = "Task" [ 1373.748542] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.756568] env[68437]: DEBUG oslo_vmware.api [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945247, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.258607] env[68437]: DEBUG oslo_vmware.api [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945247, 'name': PowerOnVM_Task, 'duration_secs': 0.435748} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.258919] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1374.259142] env[68437]: DEBUG nova.compute.manager [None req-6fc8e0db-1d2c-48b4-9806-253ed5396e05 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1374.259834] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ad16c4-3aa4-4ba7-b091-6388eceb1c88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.285955] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98d81d9-070f-4cda-a81d-35abef05439c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.293191] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Suspending the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1375.293431] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-46abf558-e999-4dfb-a021-8adce03b0386 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.299409] env[68437]: DEBUG oslo_vmware.api [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1375.299409] env[68437]: value = "task-2945248" [ 1375.299409] env[68437]: _type = "Task" [ 1375.299409] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.306951] env[68437]: DEBUG oslo_vmware.api [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945248, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.810441] env[68437]: DEBUG oslo_vmware.api [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945248, 'name': SuspendVM_Task} progress is 79%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.309754] env[68437]: DEBUG oslo_vmware.api [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945248, 'name': SuspendVM_Task, 'duration_secs': 0.555149} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.310123] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Suspended the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1376.310246] env[68437]: DEBUG nova.compute.manager [None req-7642bbb2-49cd-4a35-8eed-7a818dcf0046 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1376.310991] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ce72f0-3cbd-4cfe-847e-79f61a1e123c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.602301] env[68437]: INFO nova.compute.manager [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Resuming [ 1377.602979] env[68437]: DEBUG nova.objects.instance [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'flavor' on Instance uuid 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1379.112984] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.113401] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquired lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1379.113401] env[68437]: DEBUG nova.network.neutron [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 
1379.812367] env[68437]: DEBUG nova.network.neutron [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [{"id": "e8fbb16d-1663-49af-b989-d8e689d060c7", "address": "fa:16:3e:3f:15:6d", "network": {"id": "8e67b0e1-bfe1-4010-a060-e471ca6414a7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-73434191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6f6382f6c6843529a37d7c62837523a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8fbb16d-16", "ovs_interfaceid": "e8fbb16d-1663-49af-b989-d8e689d060c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.236191] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "88191506-b278-4502-b72d-07169f4fd6a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.236557] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.236651] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "88191506-b278-4502-b72d-07169f4fd6a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.236836] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.237019] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.239170] env[68437]: INFO nova.compute.manager [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Terminating instance [ 1380.315577] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Releasing lock "refresh_cache-3562f6dc-2596-4878-96f5-1e0da54a168b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1380.316520] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3de19b2-b7c9-49a1-bc6c-edd68ea1696a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.323425] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Resuming the VM {{(pid=68437) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1380.323703] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e9c7652-01aa-4665-af27-e08b0819c391 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.329787] env[68437]: DEBUG oslo_vmware.api [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1380.329787] env[68437]: value = "task-2945249" [ 1380.329787] env[68437]: _type = "Task" [ 1380.329787] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.336713] env[68437]: DEBUG oslo_vmware.api [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.743477] env[68437]: DEBUG nova.compute.manager [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1380.743753] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1380.744694] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fde68b-107a-43b3-9e60-39cd93f46418 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.753872] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1380.754275] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2930e75-58be-4ac9-a645-f115524f7768 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.760736] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1380.760736] env[68437]: value = "task-2945250" [ 1380.760736] env[68437]: _type = "Task" [ 1380.760736] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.770042] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.840155] env[68437]: DEBUG oslo_vmware.api [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945249, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.271494] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945250, 'name': PowerOffVM_Task, 'duration_secs': 0.359022} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.271896] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1381.272033] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1381.272271] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f182608-0b16-4cf6-9ee7-d04ef011c348 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.339945] env[68437]: DEBUG oslo_vmware.api [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945249, 'name': PowerOnVM_Task, 'duration_secs': 0.517792} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.341086] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Resumed the VM {{(pid=68437) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1381.341292] env[68437]: DEBUG nova.compute.manager [None req-0b3f3bdd-94c1-4f11-8b31-83cae6e72831 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1381.341594] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1381.341846] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1381.342095] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Deleting the datastore file [datastore1] 88191506-b278-4502-b72d-07169f4fd6a6 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1381.342978] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329b62ed-7788-4fc6-8ddd-d46c9752513b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.345434] 
env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00c9affb-72c8-4f2e-a8b9-6812f82af824 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.353787] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for the task: (returnval){ [ 1381.353787] env[68437]: value = "task-2945252" [ 1381.353787] env[68437]: _type = "Task" [ 1381.353787] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.362446] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.867843] env[68437]: DEBUG oslo_vmware.api [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Task: {'id': task-2945252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322226} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.868091] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1381.868281] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1381.868639] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1381.868969] env[68437]: INFO nova.compute.manager [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1381.869360] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1381.870240] env[68437]: DEBUG nova.compute.manager [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1381.870240] env[68437]: DEBUG nova.network.neutron [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1382.288596] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "3562f6dc-2596-4878-96f5-1e0da54a168b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.288841] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.289044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.289230] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.289398] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.291172] env[68437]: INFO nova.compute.manager [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Terminating instance [ 1382.306440] env[68437]: DEBUG nova.compute.manager [req-05b3184f-c53f-4287-a852-a3f42f12d68a req-d89b1cd1-48e0-41e4-a208-5e38829d596c service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Received event network-vif-deleted-59123504-f773-43df-9389-0f86ff9b7e52 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1382.306440] env[68437]: INFO nova.compute.manager 
[req-05b3184f-c53f-4287-a852-a3f42f12d68a req-d89b1cd1-48e0-41e4-a208-5e38829d596c service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Neutron deleted interface 59123504-f773-43df-9389-0f86ff9b7e52; detaching it from the instance and deleting it from the info cache [ 1382.306440] env[68437]: DEBUG nova.network.neutron [req-05b3184f-c53f-4287-a852-a3f42f12d68a req-d89b1cd1-48e0-41e4-a208-5e38829d596c service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.791689] env[68437]: DEBUG nova.network.neutron [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.794708] env[68437]: DEBUG nova.compute.manager [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1382.795030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1382.796303] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a21fa6-ab48-4d51-bd1e-c8b05fb2f219 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.806487] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1382.806732] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-405ea81b-48fc-4335-805c-2b79689d453c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.808664] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28173a01-11ff-409d-8ecb-cbb38843a143 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.816948] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e311a369-9c07-4f91-8e33-0b6c55442c0f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.829606] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1382.829606] env[68437]: value = "task-2945253" [ 1382.829606] env[68437]: _type = "Task" [ 1382.829606] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.837667] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945253, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.844487] env[68437]: DEBUG nova.compute.manager [req-05b3184f-c53f-4287-a852-a3f42f12d68a req-d89b1cd1-48e0-41e4-a208-5e38829d596c service nova] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Detach interface failed, port_id=59123504-f773-43df-9389-0f86ff9b7e52, reason: Instance 88191506-b278-4502-b72d-07169f4fd6a6 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1383.295110] env[68437]: INFO nova.compute.manager [-] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Took 1.42 seconds to deallocate network for instance. [ 1383.339498] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945253, 'name': PowerOffVM_Task, 'duration_secs': 0.228314} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.339915] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1383.340233] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1383.340506] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d20b4cb-405c-4728-82e3-b810313a7048 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.407655] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1383.407895] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1383.408082] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleting the datastore file [datastore1] 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1383.408346] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae344a5c-40ca-4f95-9a01-e3a935c4bb51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.414749] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for the task: (returnval){ [ 1383.414749] env[68437]: value = "task-2945255" [ 1383.414749] env[68437]: _type = "Task" [ 1383.414749] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.422251] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.801875] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.802255] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.802533] env[68437]: DEBUG nova.objects.instance [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lazy-loading 'resources' on Instance uuid 88191506-b278-4502-b72d-07169f4fd6a6 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1383.926016] env[68437]: DEBUG oslo_vmware.api [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Task: {'id': task-2945255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155865} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.926347] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1383.926545] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1383.926721] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1383.926890] env[68437]: INFO nova.compute.manager [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1383.927142] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1383.927331] env[68437]: DEBUG nova.compute.manager [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1383.927428] env[68437]: DEBUG nova.network.neutron [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1384.341654] env[68437]: DEBUG nova.compute.manager [req-c23b0996-bed2-4801-b84c-6aebf6714058 req-dfc3f5bb-b223-49a9-b782-2fa2d4745c28 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Received event network-vif-deleted-e8fbb16d-1663-49af-b989-d8e689d060c7 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1384.341892] env[68437]: INFO nova.compute.manager [req-c23b0996-bed2-4801-b84c-6aebf6714058 req-dfc3f5bb-b223-49a9-b782-2fa2d4745c28 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Neutron deleted interface e8fbb16d-1663-49af-b989-d8e689d060c7; detaching it from the instance and deleting it from the info cache [ 1384.342034] env[68437]: DEBUG nova.network.neutron [req-c23b0996-bed2-4801-b84c-6aebf6714058 req-dfc3f5bb-b223-49a9-b782-2fa2d4745c28 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.362683] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f67591-d03b-47b5-8516-8612c6dfb24d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.370008] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ac37a2-f937-4cd5-82b8-6696828ff42c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.399464] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872ec96e-fe9f-40e7-aa6e-bb184797e96c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.406394] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd55292-d35b-4c58-ae50-713f16508843 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.418987] env[68437]: DEBUG nova.compute.provider_tree [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.828032] env[68437]: DEBUG nova.network.neutron [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.844760] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-057fa2ad-c72b-4c42-9009-45b0befb9ec0 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.854417] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1575df-f9fa-48ec-b9be-83a212891528 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.881323] env[68437]: DEBUG nova.compute.manager [req-c23b0996-bed2-4801-b84c-6aebf6714058 req-dfc3f5bb-b223-49a9-b782-2fa2d4745c28 service nova] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Detach interface failed, port_id=e8fbb16d-1663-49af-b989-d8e689d060c7, reason: Instance 3562f6dc-2596-4878-96f5-1e0da54a168b could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1384.922077] env[68437]: DEBUG nova.scheduler.client.report [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1385.330263] env[68437]: INFO nova.compute.manager [-] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Took 1.40 seconds to deallocate network for instance. [ 1385.427591] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.444904] env[68437]: INFO nova.scheduler.client.report [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Deleted allocations for instance 88191506-b278-4502-b72d-07169f4fd6a6 [ 1385.836969] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.837288] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.837519] env[68437]: DEBUG nova.objects.instance [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lazy-loading 'resources' on Instance uuid 3562f6dc-2596-4878-96f5-1e0da54a168b {{(pid=68437) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1385.952463] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ff4f493b-6b08-492e-94e6-e805e3ee954c tempest-ServersTestManualDisk-297762748 tempest-ServersTestManualDisk-297762748-project-member] Lock "88191506-b278-4502-b72d-07169f4fd6a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.716s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1386.377894] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1fa168-0d4f-44a2-a903-72802267dca5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.385594] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88d8571-7412-49e7-9eff-876ed9c0a772 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.414593] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4deef1-c80b-4c2e-8a03-6e7cc8e4fc58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.422762] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb655c5-e7e6-4ee7-af4c-bfeb1d4932ed {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.437073] env[68437]: DEBUG nova.compute.provider_tree [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.939861] env[68437]: DEBUG nova.scheduler.client.report [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1387.444729] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.466799] env[68437]: INFO nova.scheduler.client.report [None req-c7d262c0-88ed-4140-88f7-1134458493c7 tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Deleted allocations for instance 3562f6dc-2596-4878-96f5-1e0da54a168b [ 1387.974444] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c7d262c0-88ed-4140-88f7-1134458493c7 
tempest-ServerActionsTestJSON-1568522030 tempest-ServerActionsTestJSON-1568522030-project-member] Lock "3562f6dc-2596-4878-96f5-1e0da54a168b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.685s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.670236] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.670518] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.088676] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.088935] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.089107] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.089270] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.089413] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1392.173381] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1392.231672] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.697274] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1392.697789] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1392.700421] env[68437]: INFO nova.compute.claims [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1393.226401] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.760348] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192a81f2-2967-42dc-9019-7679a961de6f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.768035] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd6b093-d1c6-4223-88de-73e612cb10ec {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.800051] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484df97c-0df1-4c29-9e8d-c2bb8e55cb51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.807086] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0e4d14-e54d-4ecd-923d-14b2802c8923 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.819914] env[68437]: DEBUG nova.compute.provider_tree [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1394.110729] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock 
"95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.110970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.230691] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.230958] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.326020] env[68437]: DEBUG nova.scheduler.client.report [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1394.613142] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1394.736819] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.829205] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.829793] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1394.832419] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.096s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.832640] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.832880] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1394.833941] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c6eb8a-409b-4d42-a78e-8f23bddd4388 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.843518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5515e85b-b4f5-49db-b246-b78fd22d93fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.848611] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.848823] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.849022] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.849206] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1394.849376] env[68437]: 
DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.851575] env[68437]: INFO nova.compute.manager [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Terminating instance [ 1394.862038] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fca5cb0-aef9-4d7f-b100-2225e486ca56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.870040] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ced514-8bd9-43b0-8b30-d3b1454cf717 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.898801] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1394.898960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.899142] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.133183] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.335494] env[68437]: DEBUG nova.compute.utils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1395.336935] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1395.337139] env[68437]: DEBUG nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1395.365945] env[68437]: DEBUG nova.compute.manager [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1395.366250] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1395.367235] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e89ce5-5ca6-4762-adc5-7d1b2b465b49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.373536] env[68437]: DEBUG nova.policy [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5f65dac5fd04c59b33cb454ee1c3e2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa08e0c3081143cbb8f4d00d7e5cf222', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1395.377449] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1395.377673] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-992c27c3-541c-406c-8774-b616ba334521 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.385067] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1395.385067] env[68437]: value = "task-2945256" [ 1395.385067] env[68437]: _type = "Task" [ 1395.385067] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.392598] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.644610] env[68437]: DEBUG nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Successfully created port: 69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.840563] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1395.895330] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945256, 'name': PowerOffVM_Task, 'duration_secs': 0.17835} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.895553] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1395.895730] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1395.895967] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c6b3ccd-d497-4b76-90df-2204b0416d3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.922013] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.922211] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8b80927c-1cda-4652-8c2e-df39c93bae78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
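[editor note] The power-off above follows the usual oslo.vmware pattern: the driver starts an asynchronous vCenter task (PowerOffVM_Task) through the API session, then blocks in wait_for_task, which polls the task object until it completes (the "progress is 0%" then "completed successfully" entries). A minimal sketch of that pattern, assuming a freshly created VMwareAPISession; the host, credentials and moref value below are placeholders, not values from this log.

# Sketch only: the invoke-task-then-wait pattern behind the PowerOffVM_Task
# entries above. Host, credentials and the moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'password',
    api_retry_count=3, task_poll_interval=0.5)

# Managed object reference for the VM (the value here is illustrative).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous vCenter task, then poll it until it finishes;
# wait_for_task raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
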
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1395.957199] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.957532] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleting contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.957747] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleting the datastore file [datastore2] 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.958014] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42afb447-2fa4-468f-a279-467c3b343253 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.964464] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for the task: (returnval){ [ 1395.964464] env[68437]: value = "task-2945258" [ 1395.964464] env[68437]: _type = "Task" [ 1395.964464] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.972385] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945258, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.425328] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 95b8784f-89e4-4ca3-b852-db9417e5b8b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1396.425576] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1396.425729] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1396.473888] env[68437]: DEBUG oslo_vmware.api [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Task: {'id': task-2945258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143561} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.474287] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.474381] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deleted contents of the VM from datastore datastore2 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.474627] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.474829] env[68437]: INFO nova.compute.manager [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1396.475095] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1396.475398] env[68437]: DEBUG nova.compute.manager [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1396.475398] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1396.478293] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e5d786-149c-4e14-b63f-b8a8a34f1bf7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.486055] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba7f605-dbd3-4709-ad6d-5fca5be4685d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.515154] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadd4765-66fa-4e47-ab9c-a4b0136c3e4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.522747] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213a435a-5a2e-49de-98cb-4f62f42dbd9e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.537198] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.854684] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Start spawning the instance on the hypervisor. 
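[editor note] Taken together, the preceding entries trace the VMware driver's teardown path for instance 6d4f617c: power off, unregister from the vCenter inventory, delete the instance directory on the datastore, then hand back to the compute manager to deallocate the Neutron ports. A condensed sketch of that ordering; the parameters stand in for values the real driver looks up, and only the call order is taken from the log.

# Condensed sketch of the teardown order shown above (power off, unregister,
# delete the datastore directory, deallocate networking). Parameters stand in
# for values the real driver resolves itself.
def destroy_instance(session, vm_ref, dc_ref, instance_dir, deallocate_network):
    # 1. PowerOffVM_Task: stop the guest before touching its files.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. UnregisterVM: drop the VM from the vCenter inventory (synchronous).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. DeleteDatastoreFile_Task: remove the instance directory, e.g.
    #    "[datastore2] 6d4f617c-...", then wait for the task as above.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=instance_dir,
                              datacenter=dc_ref)
    session.wait_for_task(task)

    # 4. Hand back to the compute manager to release the Neutron ports.
    deallocate_network()
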
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1396.881758] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1396.882017] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.882537] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1396.882537] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.882537] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1396.882756] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1396.882849] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1396.883019] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1396.883404] env[68437]: DEBUG 
nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1396.883594] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1396.883774] env[68437]: DEBUG nova.virt.hardware [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1396.884636] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc0cce9-b7d3-45cc-b501-cff2e75a1921 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.892908] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1418d3-b781-4ebe-996e-3ad8293d9ee0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.948667] env[68437]: DEBUG nova.compute.manager [req-5bfb6bf9-2fb8-4a3f-8f09-f1727af24d62 req-9e8d3dad-c612-4326-860e-e0ab0e65b488 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Received event network-vif-deleted-d3799e51-78a6-4580-a8ae-68366989843d {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1396.948768] env[68437]: INFO nova.compute.manager [req-5bfb6bf9-2fb8-4a3f-8f09-f1727af24d62 req-9e8d3dad-c612-4326-860e-e0ab0e65b488 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Neutron deleted interface d3799e51-78a6-4580-a8ae-68366989843d; detaching it from the instance and deleting it from the info cache [ 1396.948935] env[68437]: DEBUG nova.network.neutron [req-5bfb6bf9-2fb8-4a3f-8f09-f1727af24d62 req-9e8d3dad-c612-4326-860e-e0ab0e65b488 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.042113] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1397.114215] env[68437]: DEBUG nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Successfully updated port: 
69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1397.432376] env[68437]: DEBUG nova.network.neutron [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.452089] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c28a6c5-322d-4d65-80f8-cbe634ada02b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.462273] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341e4d46-d58d-4044-921f-f56d536f9d5f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.485863] env[68437]: DEBUG nova.compute.manager [req-5bfb6bf9-2fb8-4a3f-8f09-f1727af24d62 req-9e8d3dad-c612-4326-860e-e0ab0e65b488 service nova] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Detach interface failed, port_id=d3799e51-78a6-4580-a8ae-68366989843d, reason: Instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1397.547737] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1397.547737] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.649s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1397.548062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.415s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.549617] env[68437]: INFO nova.compute.claims [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.616847] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.617052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.617158] env[68437]: DEBUG 
nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1397.935366] env[68437]: INFO nova.compute.manager [-] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Took 1.46 seconds to deallocate network for instance. [ 1398.158864] env[68437]: DEBUG nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1398.285064] env[68437]: DEBUG nova.network.neutron [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.441913] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.606725] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8dcc26-10c3-42e2-aa4c-e5869a7429b6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.614558] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17fbb59-490d-41e2-ab9b-d09e59c08cf8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.643849] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2dd1a641-bd10-4d07-9102-608aa7bc4f02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.650519] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36592d1c-b993-4e9f-bde7-3757cfbb2b62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.663096] env[68437]: DEBUG nova.compute.provider_tree [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.788182] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.788567] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Instance network_info: |[{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1398.789028] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:3c:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69e7c375-5473-4c45-822e-b368dafd9ff9', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.796527] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 
tempest-ServerActionsTestOtherB-2016325214-project-member] Creating folder: Project (aa08e0c3081143cbb8f4d00d7e5cf222). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.796847] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca853f3b-bf23-4ba3-83c6-1d56743d117b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.808681] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created folder: Project (aa08e0c3081143cbb8f4d00d7e5cf222) in parent group-v590848. [ 1398.808856] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating folder: Instances. Parent ref: group-v591166. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.809083] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5cdc67d7-b68a-4b8e-9308-0dd88d47dc12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.818264] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created folder: Instances in parent group-v591166. [ 1398.818530] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1398.818762] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.818987] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34ade4d8-774d-4f45-abaa-e38a17bf2138 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.838078] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.838078] env[68437]: value = "task-2945261" [ 1398.838078] env[68437]: _type = "Task" [ 1398.838078] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.844801] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945261, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.988840] env[68437]: DEBUG nova.compute.manager [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Received event network-vif-plugged-69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1398.989174] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1398.989517] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1398.989721] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1398.989942] env[68437]: DEBUG nova.compute.manager [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] No waiting events found dispatching network-vif-plugged-69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1398.990201] env[68437]: WARNING nova.compute.manager [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Received unexpected event network-vif-plugged-69e7c375-5473-4c45-822e-b368dafd9ff9 for instance with vm_state building and task_state spawning. [ 1398.990410] env[68437]: DEBUG nova.compute.manager [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Received event network-changed-69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1398.990588] env[68437]: DEBUG nova.compute.manager [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Refreshing instance network info cache due to event network-changed-69e7c375-5473-4c45-822e-b368dafd9ff9. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1398.990897] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.991183] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.991320] env[68437]: DEBUG nova.network.neutron [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Refreshing network info cache for port 69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1399.166573] env[68437]: DEBUG nova.scheduler.client.report [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1399.347590] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945261, 'name': CreateVM_Task, 'duration_secs': 0.323558} completed successfully. 
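[editor note] The "Acquiring lock" / "Lock ... released" pairs above are oslo.concurrency's named in-process locks: event dispatch and the network-info cache refresh serialize on per-instance keys such as "<uuid>-events" and "refresh_cache-<uuid>", while the resource tracker serializes on "compute_resources". A minimal sketch of that pattern, assuming a hypothetical refresh_network_cache callable; only the lock-key style is taken from the log.

# Minimal sketch of the named-lock pattern behind the entries above.
# refresh_network_cache is a hypothetical callable, not a real Nova function.
from oslo_concurrency import lockutils

def refresh_cache(context, instance_uuid, refresh_network_cache):
    # All refreshes for one instance serialize on the same in-process key,
    # mirroring the "refresh_cache-<uuid>" lock name in the log.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh_network_cache(context, instance_uuid)

# Decorator form, equivalent for a fixed lock name such as "compute_resources".
@lockutils.synchronized('compute_resources')
def update_usage():
    pass
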
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.347771] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.348598] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.348665] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.349017] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1399.349656] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ba85cfe-d944-4bd4-a986-2538812a2c5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.353555] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1399.353555] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5223ab2a-a07e-a6e1-f64c-52c23b1bcfa7" [ 1399.353555] env[68437]: _type = "Task" [ 1399.353555] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.361100] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5223ab2a-a07e-a6e1-f64c-52c23b1bcfa7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.669364] env[68437]: DEBUG nova.network.neutron [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updated VIF entry in instance network info cache for port 69e7c375-5473-4c45-822e-b368dafd9ff9. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1399.669735] env[68437]: DEBUG nova.network.neutron [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.671580] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.671846] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Start building networks asynchronously for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1399.674526] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.233s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1399.674743] env[68437]: DEBUG nova.objects.instance [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lazy-loading 'resources' on Instance uuid 6d4f617c-97d6-4721-97c0-7a6b8676d681 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1399.865321] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5223ab2a-a07e-a6e1-f64c-52c23b1bcfa7, 'name': SearchDatastore_Task, 'duration_secs': 0.009805} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.865653] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.865892] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1399.866141] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.866291] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1399.866490] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.866735] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e90c7c70-fb4b-4ad0-bde9-4c715d91cb5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.875574] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.875764] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1399.876484] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617425bd-3fcd-40bb-8671-da4a09751e91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.882267] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1399.882267] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52618402-24db-a69b-0508-832fcb8c0b5f" [ 1399.882267] env[68437]: _type = "Task" [ 1399.882267] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.889745] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52618402-24db-a69b-0508-832fcb8c0b5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.175744] env[68437]: DEBUG oslo_concurrency.lockutils [req-ade127f7-b1ce-425c-8d24-7d08dd72f8ee req-51405710-df90-44bc-83a8-394ce0efe460 service nova] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.177217] env[68437]: DEBUG nova.compute.utils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1400.181548] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1400.181718] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1400.216978] env[68437]: DEBUG nova.policy [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c41fdd9aa844ec4aef0f0d2989ea63b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '490b965164a14c9faf6b0329886d617e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1400.224218] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9c4a31-a4c0-4f83-a15d-40a71cba3280 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.231918] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd6225f-d881-4c43-af22-cafb67e7f3f5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.263580] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9191bb53-4dfa-461e-8120-79e1097b2232 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.270353] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9507877-5f58-471f-a5e8-766af9ae4397 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.282872] env[68437]: DEBUG nova.compute.provider_tree [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.391854] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52618402-24db-a69b-0508-832fcb8c0b5f, 'name': SearchDatastore_Task, 'duration_secs': 0.008139} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.392651] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d76139e-db58-45af-bcf7-748b61393b0b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.397616] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1400.397616] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5291c5c9-26f9-973f-7384-e51b14565f92" [ 1400.397616] env[68437]: _type = "Task" [ 1400.397616] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.404865] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5291c5c9-26f9-973f-7384-e51b14565f92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.500099] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Successfully created port: 289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.682799] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1400.786357] env[68437]: DEBUG nova.scheduler.client.report [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1400.909024] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5291c5c9-26f9-973f-7384-e51b14565f92, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.909024] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1400.909024] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1400.909413] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-705316a1-5498-41fd-b5e6-b76b07efc4fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.916441] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1400.916441] env[68437]: value = "task-2945262" [ 1400.916441] env[68437]: _type = "Task" [ 1400.916441] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.924192] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.290981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.314100] env[68437]: INFO nova.scheduler.client.report [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Deleted allocations for instance 6d4f617c-97d6-4721-97c0-7a6b8676d681 [ 1401.426480] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435383} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.426723] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.426894] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.427164] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96787f65-0dcb-49f3-bd27-e8debcc628fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.433859] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1401.433859] env[68437]: value = "task-2945263" [ 1401.433859] env[68437]: _type = "Task" [ 1401.433859] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.442640] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945263, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.693297] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1401.720433] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1401.720677] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1401.720838] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1401.721033] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1401.721183] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1401.721333] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1401.721540] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1401.721698] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1401.721862] env[68437]: DEBUG nova.virt.hardware [None 
req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1401.722029] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1401.722203] env[68437]: DEBUG nova.virt.hardware [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1401.723080] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c12895-3e35-4308-a440-4847d05d2d64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.731264] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86584a9-686c-4324-a405-c67d09aeda75 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.822354] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8dfdd02b-7d3e-4553-896e-41e7b9870526 tempest-AttachVolumeShelveTestJSON-1196772587 tempest-AttachVolumeShelveTestJSON-1196772587-project-member] Lock "6d4f617c-97d6-4721-97c0-7a6b8676d681" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.973s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.853028] env[68437]: DEBUG nova.compute.manager [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Received event network-vif-plugged-289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1401.853260] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1401.853465] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1401.853737] env[68437]: DEBUG oslo_concurrency.lockutils [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
[ 1401.853922] env[68437]: DEBUG nova.compute.manager [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] No waiting events found dispatching network-vif-plugged-289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1401.854113] env[68437]: WARNING nova.compute.manager [req-6ecdf1d3-b619-4e68-a20a-ee1848d252ad req-a42b7349-8478-4552-b79c-24b8d243da79 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Received unexpected event network-vif-plugged-289592f8-bb26-4f97-ac37-15183e5f59e2 for instance with vm_state building and task_state spawning. [ 1401.938250] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Successfully updated port: 289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1401.945803] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945263, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061592} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.946563] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1401.947287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c970a2-179f-4143-8920-96835ebccaa9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.971155] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.971689] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28d988c0-7d60-40ae-972d-b57f71cc5fba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.991365] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1401.991365] env[68437]: value = "task-2945264" [ 1401.991365] env[68437]: _type = "Task" [ 1401.991365] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.998952] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945264, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.440765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.441021] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.441104] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1402.500799] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945264, 'name': ReconfigVM_Task, 'duration_secs': 0.271391} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.501089] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.501714] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33683b38-c28b-43bd-a717-dba43b49be7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.508726] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1402.508726] env[68437]: value = "task-2945265" [ 1402.508726] env[68437]: _type = "Task" [ 1402.508726] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.516777] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945265, 'name': Rename_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.971930] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1403.020884] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945265, 'name': Rename_Task, 'duration_secs': 0.136896} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.021151] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1403.021404] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b37177d6-243b-47c9-8c6c-6976bbe112d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.027905] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1403.027905] env[68437]: value = "task-2945266" [ 1403.027905] env[68437]: _type = "Task" [ 1403.027905] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.037370] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945266, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.116945] env[68437]: DEBUG nova.network.neutron [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.537441] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945266, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.620285] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1403.620632] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Instance network_info: |[{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1403.621116] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:5d:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '289592f8-bb26-4f97-ac37-15183e5f59e2', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.629017] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Creating folder: Project (490b965164a14c9faf6b0329886d617e). Parent ref: group-v590848. 
{{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.629304] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52feefb9-a5af-4061-8d1c-2b468c27c664 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.640529] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Created folder: Project (490b965164a14c9faf6b0329886d617e) in parent group-v590848. [ 1403.640762] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Creating folder: Instances. Parent ref: group-v591169. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1403.641035] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60652eb1-e29d-4bdf-8934-74d56fbd8cf3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.650768] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Created folder: Instances in parent group-v591169. [ 1403.651037] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1403.651238] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1403.651459] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f581b88-7f08-4db4-bb68-acb6c0b59b91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.670892] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.670892] env[68437]: value = "task-2945270" [ 1403.670892] env[68437]: _type = "Task" [ 1403.670892] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.678114] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945270, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.881964] env[68437]: DEBUG nova.compute.manager [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Received event network-changed-289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1403.882193] env[68437]: DEBUG nova.compute.manager [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Refreshing instance network info cache due to event network-changed-289592f8-bb26-4f97-ac37-15183e5f59e2. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1403.882410] env[68437]: DEBUG oslo_concurrency.lockutils [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] Acquiring lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.882553] env[68437]: DEBUG oslo_concurrency.lockutils [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] Acquired lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1403.882715] env[68437]: DEBUG nova.network.neutron [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Refreshing network info cache for port 289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1404.037892] env[68437]: DEBUG oslo_vmware.api [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945266, 'name': PowerOnVM_Task, 'duration_secs': 0.663442} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.038202] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1404.038410] env[68437]: INFO nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 1404.038592] env[68437]: DEBUG nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1404.039374] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716e3ab9-26ec-4864-8d8e-e441d98fd4d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.182834] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945270, 'name': CreateVM_Task, 'duration_secs': 0.353084} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.183091] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1404.183805] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.183982] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.184384] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1404.184640] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a240ef63-d55a-4896-b313-30aae2b31497 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.189443] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1404.189443] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521fc2d1-870d-3751-de50-02f323032869" [ 1404.189443] env[68437]: _type = "Task" [ 1404.189443] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.196875] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521fc2d1-870d-3751-de50-02f323032869, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.557406] env[68437]: DEBUG nova.network.neutron [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updated VIF entry in instance network info cache for port 289592f8-bb26-4f97-ac37-15183e5f59e2. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1404.557766] env[68437]: DEBUG nova.network.neutron [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.558937] env[68437]: INFO nova.compute.manager [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 11.88 seconds to build instance. [ 1404.699648] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521fc2d1-870d-3751-de50-02f323032869, 'name': SearchDatastore_Task, 'duration_secs': 0.009515} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.699894] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.700182] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.700460] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.700610] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1404.700795] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.701065] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-635c334a-00e7-47fd-a0f5-cbd3b336510b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.709636] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.709808] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.710519] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87a13c8-29b8-4ead-8a0f-11ee550fa6ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.715603] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1404.715603] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528de226-16a9-febc-54b9-64c62cf4cd80" [ 1404.715603] env[68437]: _type = "Task" [ 1404.715603] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.723570] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528de226-16a9-febc-54b9-64c62cf4cd80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.061250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-47be0491-b0c1-42b4-94e2-b63fec1b2a9c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.390s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1405.061687] env[68437]: DEBUG oslo_concurrency.lockutils [req-eaad0383-6c2b-40d6-8da6-f6b9e197df1f req-f25496cd-2de9-4370-b4ad-63fddeff394b service nova] Releasing lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.229028] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528de226-16a9-febc-54b9-64c62cf4cd80, 'name': SearchDatastore_Task, 'duration_secs': 0.008172} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.229313] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d14020a-c5b4-4d86-8572-dccb84d68982 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.235443] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1405.235443] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c194f8-4986-bea5-1d1d-19a1c4b0315b" [ 1405.235443] env[68437]: _type = "Task" [ 1405.235443] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.244218] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c194f8-4986-bea5-1d1d-19a1c4b0315b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.746079] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c194f8-4986-bea5-1d1d-19a1c4b0315b, 'name': SearchDatastore_Task, 'duration_secs': 0.010007} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.746427] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1405.746672] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 95b8784f-89e4-4ca3-b852-db9417e5b8b8/95b8784f-89e4-4ca3-b852-db9417e5b8b8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.746862] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3cc05ff6-8eec-4682-8da0-41f89edbc3e5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.752891] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1405.752891] env[68437]: value = "task-2945271" [ 1405.752891] env[68437]: _type = "Task" [ 1405.752891] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.760249] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945271, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.999667] env[68437]: DEBUG nova.compute.manager [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Received event network-changed-69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1405.999895] env[68437]: DEBUG nova.compute.manager [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Refreshing instance network info cache due to event network-changed-69e7c375-5473-4c45-822e-b368dafd9ff9. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1406.000151] env[68437]: DEBUG oslo_concurrency.lockutils [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.000299] env[68437]: DEBUG oslo_concurrency.lockutils [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1406.000463] env[68437]: DEBUG nova.network.neutron [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Refreshing network info cache for port 69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1406.263562] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945271, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473365} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.263872] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 95b8784f-89e4-4ca3-b852-db9417e5b8b8/95b8784f-89e4-4ca3-b852-db9417e5b8b8.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.264123] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.264394] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfc76863-d07d-4503-a50d-1734f4d8a410 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.270996] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1406.270996] env[68437]: value = "task-2945272" [ 1406.270996] env[68437]: _type = "Task" [ 1406.270996] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.280174] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945272, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.724740] env[68437]: DEBUG nova.network.neutron [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updated VIF entry in instance network info cache for port 69e7c375-5473-4c45-822e-b368dafd9ff9. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1406.726133] env[68437]: DEBUG nova.network.neutron [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.780803] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945272, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064505} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.781138] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1406.781865] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1ec28f-dc13-4f48-82ef-6a0ff6bcc529 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.803951] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 95b8784f-89e4-4ca3-b852-db9417e5b8b8/95b8784f-89e4-4ca3-b852-db9417e5b8b8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1406.804512] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06943888-5ac1-49ed-a66a-f568818becc2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.824022] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1406.824022] env[68437]: value = "task-2945273" [ 1406.824022] env[68437]: _type = "Task" [ 1406.824022] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.834021] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945273, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.227502] env[68437]: DEBUG oslo_concurrency.lockutils [req-47c0b397-3b4a-4475-a0cc-f8f5ab9a420c req-444a9260-d5c6-4ca7-8aed-086207c32961 service nova] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1407.333595] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945273, 'name': ReconfigVM_Task, 'duration_secs': 0.276864} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.333880] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 95b8784f-89e4-4ca3-b852-db9417e5b8b8/95b8784f-89e4-4ca3-b852-db9417e5b8b8.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.334578] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b09f4f22-761f-4c29-965d-2ecdf06a2a6b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.340732] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1407.340732] env[68437]: value = "task-2945274" [ 1407.340732] env[68437]: _type = "Task" [ 1407.340732] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.349355] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945274, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.851639] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945274, 'name': Rename_Task, 'duration_secs': 0.141465} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.852200] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.852371] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b743430-3173-45e3-a7df-2a505d59318f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.859948] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1407.859948] env[68437]: value = "task-2945275" [ 1407.859948] env[68437]: _type = "Task" [ 1407.859948] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.868860] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945275, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.370424] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945275, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.870828] env[68437]: DEBUG oslo_vmware.api [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945275, 'name': PowerOnVM_Task, 'duration_secs': 0.6976} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.871136] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.871275] env[68437]: INFO nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Took 7.18 seconds to spawn the instance on the hypervisor. [ 1408.871457] env[68437]: DEBUG nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1408.872221] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4d6025-1dd4-401f-8f94-3bc99310ecb8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.391097] env[68437]: INFO nova.compute.manager [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Took 14.27 seconds to build instance. [ 1409.523836] env[68437]: DEBUG nova.compute.manager [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Received event network-changed-289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1409.524085] env[68437]: DEBUG nova.compute.manager [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Refreshing instance network info cache due to event network-changed-289592f8-bb26-4f97-ac37-15183e5f59e2. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1409.524303] env[68437]: DEBUG oslo_concurrency.lockutils [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] Acquiring lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.524802] env[68437]: DEBUG oslo_concurrency.lockutils [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] Acquired lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1409.524802] env[68437]: DEBUG nova.network.neutron [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Refreshing network info cache for port 289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1409.893731] env[68437]: DEBUG oslo_concurrency.lockutils [None req-96443797-c83e-4c36-b761-841c71e65be4 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.783s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1410.218645] env[68437]: DEBUG nova.network.neutron [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updated VIF entry in instance network info cache for port 289592f8-bb26-4f97-ac37-15183e5f59e2. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1410.219010] env[68437]: DEBUG nova.network.neutron [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.721925] env[68437]: DEBUG oslo_concurrency.lockutils [req-5059a255-1afb-4c6e-ae2e-e4a5f6a7a724 req-0074eae0-e07c-4c31-b3af-938245b86593 service nova] Releasing lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1442.490533] env[68437]: DEBUG nova.compute.manager [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1442.491454] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4b6bd7-6759-47c3-a7ea-9dd541da6654 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.002529] env[68437]: INFO nova.compute.manager [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] instance snapshotting [ 1443.003165] env[68437]: DEBUG nova.objects.instance [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1443.509287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f29641-65b9-40eb-b55c-a1c36b40ee53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.528562] 
env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b450634-aefa-4cc5-be4d-16059bf8107a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.038195] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1444.038542] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-19051713-cb55-4d12-ad34-936c2b755b51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.046327] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1444.046327] env[68437]: value = "task-2945276" [ 1444.046327] env[68437]: _type = "Task" [ 1444.046327] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.054120] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945276, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.556507] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945276, 'name': CreateSnapshot_Task, 'duration_secs': 0.431822} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.556946] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1444.557518] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e06898-d3e4-4155-84eb-9384926357c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.073858] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1445.074184] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f15e9861-1e00-4c05-9fe4-344c3ab3683f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.083245] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1445.083245] env[68437]: value = "task-2945277" [ 1445.083245] env[68437]: _type = "Task" [ 1445.083245] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.090597] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945277, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.593318] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945277, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.095469] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945277, 'name': CloneVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.594163] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945277, 'name': CloneVM_Task, 'duration_secs': 1.067353} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.594516] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created linked-clone VM from snapshot [ 1446.595108] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d08281-d81f-4641-abed-3acd083bb49f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.602514] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploading image 8909f9f6-3e9a-4327-bcd6-dd05b842bb35 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1446.622291] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1446.622291] env[68437]: value = "vm-591173" [ 1446.622291] env[68437]: _type = "VirtualMachine" [ 1446.622291] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1446.622514] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-496a9cec-fab3-4505-b32d-4acdbf01f014 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.628546] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease: (returnval){ [ 1446.628546] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f12ea8-3c42-a00b-be49-91c5e5143632" [ 1446.628546] env[68437]: _type = "HttpNfcLease" [ 1446.628546] env[68437]: } obtained for exporting VM: (result){ [ 1446.628546] env[68437]: value = "vm-591173" [ 1446.628546] env[68437]: _type = "VirtualMachine" [ 1446.628546] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1446.628782] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the lease: (returnval){ [ 1446.628782] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f12ea8-3c42-a00b-be49-91c5e5143632" [ 1446.628782] env[68437]: _type = "HttpNfcLease" [ 1446.628782] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1446.634301] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1446.634301] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f12ea8-3c42-a00b-be49-91c5e5143632" [ 1446.634301] env[68437]: _type = "HttpNfcLease" [ 1446.634301] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1447.136933] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1447.136933] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f12ea8-3c42-a00b-be49-91c5e5143632" [ 1447.136933] env[68437]: _type = "HttpNfcLease" [ 1447.136933] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1447.137245] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1447.137245] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52f12ea8-3c42-a00b-be49-91c5e5143632" [ 1447.137245] env[68437]: _type = "HttpNfcLease" [ 1447.137245] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1447.137942] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2585a18f-197c-461e-a935-e4d29962d0d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.144727] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1447.144902] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk for reading. 
{{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1447.232272] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a2bc6e8-ca68-42a7-a305-56dc3ea5f8b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.428546] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1447.428802] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1447.932591] env[68437]: DEBUG nova.compute.utils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1448.436081] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1449.498408] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1449.498840] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1449.498916] env[68437]: INFO nova.compute.manager [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Attaching volume a27e27ce-bc3e-4c32-8ecf-992153ab6609 to /dev/sdb [ 1449.530142] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47743082-483b-4067-915e-b1c623a5d4a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.537862] env[68437]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2425ce32-eb92-463c-8651-8806367e65c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.551202] env[68437]: DEBUG nova.virt.block_device [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating existing volume attachment record: 81f97773-8d7e-440c-9b0f-a302116ad4e8 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1452.552875] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.552875] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.552875] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.552875] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1453.231287] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.097892] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1454.098169] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591175', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'name': 'volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95b8784f-89e4-4ca3-b852-db9417e5b8b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'serial': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1454.099134] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1f4ffc-b4dd-4dd0-892f-e2b1a8d34322 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.117386] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6ba03d-eb8c-4278-a6be-a6768a9769bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.142722] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609/volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.142991] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eccd9061-5bb5-416e-8f74-a40334f0581b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.163186] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1454.163186] env[68437]: value = "task-2945283" [ 1454.163186] env[68437]: _type = "Task" [ 1454.163186] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.170855] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945283, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.180186] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1454.181121] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3312c7e3-9f89-489f-9f32-dcfa7040988f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.186951] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1454.187192] env[68437]: ERROR oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk due to incomplete transfer. [ 1454.187396] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9d6c47f6-4374-421f-8f6c-61841ef68332 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.194994] env[68437]: DEBUG oslo_vmware.rw_handles [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5213eed4-5104-ddcf-ffeb-13b026c2aef9/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1454.195223] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploaded image 8909f9f6-3e9a-4327-bcd6-dd05b842bb35 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1454.197738] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1454.197988] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3b03bcde-e456-4d77-b1b9-ae0f31a81014 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.202973] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1454.202973] env[68437]: value = "task-2945284" [ 1454.202973] env[68437]: _type = "Task" [ 1454.202973] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.211031] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945284, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.230830] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.230830] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.672650] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945283, 'name': ReconfigVM_Task, 'duration_secs': 0.358051} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.672980] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609/volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1454.677598] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d22f0b2c-bd07-478d-b8e1-462ecd0b5fa2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.691772] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1454.691772] env[68437]: value = "task-2945285" [ 1454.691772] env[68437]: _type = "Task" [ 1454.691772] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.700279] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945285, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.710736] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945284, 'name': Destroy_Task, 'duration_secs': 0.37505} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.711114] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroyed the VM [ 1454.711464] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1454.711733] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5066b743-74e7-4f41-a0ff-fdc4e97aee3c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.717313] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1454.717313] env[68437]: value = "task-2945286" [ 1454.717313] env[68437]: _type = "Task" [ 1454.717313] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.725526] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945286, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.202509] env[68437]: DEBUG oslo_vmware.api [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945285, 'name': ReconfigVM_Task, 'duration_secs': 0.137339} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.202808] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591175', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'name': 'volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95b8784f-89e4-4ca3-b852-db9417e5b8b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'serial': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1455.226954] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.227454] env[68437]: DEBUG oslo_vmware.api [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945286, 'name': RemoveSnapshot_Task, 'duration_secs': 0.495439} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.227648] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.228900] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1455.229139] env[68437]: INFO nova.compute.manager [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 11.72 seconds to snapshot the instance on the hypervisor. 
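The ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateSnapshot_Task, CloneVM_Task and RemoveSnapshot_Task records above all follow the same oslo.vmware invoke-and-poll pattern: the driver submits an asynchronous vSphere task through the API session and then blocks in wait_for_task(), which polls the task and emits the periodic "progress is N%" / "completed successfully" lines. Below is a minimal sketch of that pattern only, assuming an already-created oslo_vmware.api.VMwareAPISession and a VM managed-object reference; the helper name power_on_and_wait and the commented host/credentials are illustrative placeholders, not values taken from this log or deployment.

from oslo_vmware import api as vmware_api  # session class used for the sketch below

def power_on_and_wait(session, vm_ref):
    # Submit an asynchronous vSphere task. 'PowerOnVM_Task' is one of the
    # task types seen in the records above; ReconfigVM_Task, CloneVM_Task,
    # etc. are invoked the same way with their own arguments.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task until it reaches a terminal state
    # (the source of the "progress is N%" records) and raises if the
    # task ends in error; on success it returns the task result.
    return session.wait_for_task(task)

# Hypothetical usage -- host, credentials and poll interval are placeholders:
# session = vmware_api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
#                                       api_retry_count=10, task_poll_interval=0.5)
# power_on_and_wait(session, vm_ref)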
[ 1455.780577] env[68437]: DEBUG nova.compute.manager [None req-94a02f33-3e6b-4721-9951-6137d2aea9e6 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Found 1 images (rotation: 2) {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1456.231322] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1456.240760] env[68437]: DEBUG nova.objects.instance [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1456.320050] env[68437]: DEBUG nova.compute.manager [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1456.321109] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12421874-a90a-4a09-a356-c4fa68bf55d4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.734619] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.734806] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.734991] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.735179] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1456.736076] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2acee4-9f2e-43df-af4e-d782c97d1c87 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.745943] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db18f66-1fd4-4334-9951-7abe4b0a58db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.750007] 
env[68437]: DEBUG oslo_concurrency.lockutils [None req-4081847a-9309-446f-a434-058f9083c43b tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.251s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1456.760547] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8ad08a-3f72-4dc7-af4e-de9f54158e9c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.766882] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18727cd-187d-4492-b8f8-2361abd72e1d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.795361] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180754MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1456.795489] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.795684] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.828382] env[68437]: DEBUG oslo_concurrency.lockutils [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.828590] env[68437]: DEBUG oslo_concurrency.lockutils [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.828759] env[68437]: DEBUG nova.compute.manager [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1456.829543] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3704a1-7052-48f0-b462-9498e873b3f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1456.833710] env[68437]: INFO nova.compute.manager [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] instance snapshotting [ 1456.834269] env[68437]: DEBUG nova.objects.instance [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1456.838895] env[68437]: DEBUG nova.compute.manager [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1456.839434] env[68437]: DEBUG nova.objects.instance [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1457.339239] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abc4aca-9384-4e45-8c9a-0cb17bc6e0c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.360656] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff2cc4b-a1ea-4a59-8824-e59197f0e10d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.820423] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 8b80927c-1cda-4652-8c2e-df39c93bae78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1457.820585] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 95b8784f-89e4-4ca3-b852-db9417e5b8b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1457.820766] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1457.820909] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1457.847131] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1457.847579] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-565a0367-0b70-46c0-9957-2eceddbc3572 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.854830] env[68437]: DEBUG oslo_vmware.api [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1457.854830] env[68437]: value = "task-2945287" [ 1457.854830] env[68437]: _type = "Task" [ 1457.854830] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.856260] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde99700-2a85-41be-bafb-831f9618da8e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.868172] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ad2628-4121-4f9f-b332-4966ef15c5c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.871188] env[68437]: DEBUG oslo_vmware.api [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945287, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.872176] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1457.872427] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c25423fd-cf01-4dbc-9b91-874aec059b4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.903868] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed8d65f-b1ed-46fc-9a03-6ba1b5d8a040 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.906569] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1457.906569] env[68437]: value = "task-2945288" [ 1457.906569] env[68437]: _type = "Task" [ 1457.906569] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.913591] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807d22cf-8581-4a5a-9cb8-a39f8c21a977 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.920164] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945288, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.930073] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1458.365157] env[68437]: DEBUG oslo_vmware.api [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945287, 'name': PowerOffVM_Task, 'duration_secs': 0.207545} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.365520] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1458.365665] env[68437]: DEBUG nova.compute.manager [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1458.366353] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2516eea1-c92e-4a00-b2ba-0ac78d8cee0d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.414970] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945288, 'name': CreateSnapshot_Task, 'duration_secs': 0.407054} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.415243] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1458.415854] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae459bf-7895-4873-8aa8-860ca79362c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.432978] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1458.877271] env[68437]: DEBUG oslo_concurrency.lockutils [None req-42583c70-c929-4d3a-ae47-fbbfc826872a tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.932961] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating linked-clone VM from snapshot {{(pid=68437) 
_create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1458.933303] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5ddea211-af79-42d1-bbaf-9c69a5870398 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.936853] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1458.937056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.141s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1458.948389] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1458.948389] env[68437]: value = "task-2945289" [ 1458.948389] env[68437]: _type = "Task" [ 1458.948389] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.182841] env[68437]: DEBUG nova.objects.instance [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.459065] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945289, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.687935] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.688162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1459.688368] env[68437]: DEBUG nova.network.neutron [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1459.688584] env[68437]: DEBUG nova.objects.instance [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'info_cache' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.959446] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945289, 'name': CloneVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.192336] env[68437]: DEBUG nova.objects.base [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Object Instance<95b8784f-89e4-4ca3-b852-db9417e5b8b8> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1460.459563] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945289, 'name': CloneVM_Task, 'duration_secs': 1.081246} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.459920] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created linked-clone VM from snapshot [ 1460.460523] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10552438-b11c-4f18-95cc-525c07d80857 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.468142] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploading image 8743628a-a489-4b26-83bf-ad16057effde {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1460.488012] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1460.488012] env[68437]: value = "vm-591177" [ 1460.488012] env[68437]: _type = "VirtualMachine" [ 1460.488012] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1460.488271] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6a1a60d6-ef36-4260-a70a-b79d13e12020 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.495427] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease: (returnval){ [ 1460.495427] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522feeb7-6b59-c17d-c323-bd17ec9b5c38" [ 1460.495427] env[68437]: _type = "HttpNfcLease" [ 1460.495427] env[68437]: } obtained for exporting VM: (result){ [ 1460.495427] env[68437]: value = "vm-591177" [ 1460.495427] env[68437]: _type = "VirtualMachine" [ 1460.495427] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1460.495649] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the lease: (returnval){ [ 1460.495649] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522feeb7-6b59-c17d-c323-bd17ec9b5c38" [ 1460.495649] env[68437]: _type = "HttpNfcLease" [ 1460.495649] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1460.501341] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1460.501341] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522feeb7-6b59-c17d-c323-bd17ec9b5c38" [ 1460.501341] env[68437]: _type = "HttpNfcLease" [ 1460.501341] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1461.004722] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1461.004722] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522feeb7-6b59-c17d-c323-bd17ec9b5c38" [ 1461.004722] env[68437]: _type = "HttpNfcLease" [ 1461.004722] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1461.004992] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1461.004992] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]522feeb7-6b59-c17d-c323-bd17ec9b5c38" [ 1461.004992] env[68437]: _type = "HttpNfcLease" [ 1461.004992] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1461.005784] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70737a08-14e6-4002-b0be-14ca130cb621 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.014689] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1461.014870] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk for reading. 
{{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1461.097923] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-859ecf41-a91f-49a4-a3a7-9be54f896a3f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.133397] env[68437]: DEBUG nova.network.neutron [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.636336] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1462.643234] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1462.643714] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80c488b3-8fa5-4d85-af48-5e56bd504ab4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.651154] env[68437]: DEBUG oslo_vmware.api [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1462.651154] env[68437]: value = "task-2945291" [ 1462.651154] env[68437]: _type = "Task" [ 1462.651154] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.659257] env[68437]: DEBUG oslo_vmware.api [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945291, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.163287] env[68437]: DEBUG oslo_vmware.api [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945291, 'name': PowerOnVM_Task, 'duration_secs': 0.403662} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.163583] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1463.163970] env[68437]: DEBUG nova.compute.manager [None req-a927010d-1e68-4ea3-b495-b7e8a44072a9 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1463.164576] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714e352d-5bd7-4183-9c43-6c47dd055a04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.174191] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1468.175393] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2678de9-3db7-4358-8567-84520cf10902 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.181997] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1468.182188] env[68437]: ERROR oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk due to incomplete transfer. 
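
The entries above trace the usual oslo.vmware task pattern: a vSphere task is invoked (PowerOnVM_Task, CloneVM_Task), then polled until it completes, while the HttpNfcLease export is driven the same way. A minimal sketch of that invoke-and-wait pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed object reference `vm_ref` (both placeholders, not taken from this log):

    # Minimal sketch: invoke a vSphere task and block until it finishes,
    # mirroring the "PowerOnVM_Task ... progress is N% ... completed
    # successfully" sequence above. `session` and `vm_ref` are assumed inputs.
    def power_on_and_wait(session, vm_ref):
        # invoke_api sends the SOAP call through the session's Vim client;
        # PowerOnVM_Task returns a Task managed object reference immediately.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task (the _poll_task lines in the log) and
        # returns the task info on success, raising on a vSphere fault.
        return session.wait_for_task(task)
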
[ 1468.182416] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-69792523-45bc-413b-a497-3c3d3b6e2916 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.191618] env[68437]: DEBUG oslo_vmware.rw_handles [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52909303-1598-97d6-196b-d62264387146/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1468.191813] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploaded image 8743628a-a489-4b26-83bf-ad16057effde to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1468.194057] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1468.194292] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-08e560db-ba6d-494a-bfd1-f9a1546ad9d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.200368] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1468.200368] env[68437]: value = "task-2945292" [ 1468.200368] env[68437]: _type = "Task" [ 1468.200368] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.208321] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945292, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.710684] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945292, 'name': Destroy_Task, 'duration_secs': 0.320296} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.710960] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroyed the VM [ 1468.711215] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1468.711468] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-be31de3c-22bd-4e02-86e6-df9b1eb9f6b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.718721] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1468.718721] env[68437]: value = "task-2945293" [ 1468.718721] env[68437]: _type = "Task" [ 1468.718721] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.726310] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945293, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.230498] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945293, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.729429] env[68437]: DEBUG oslo_vmware.api [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945293, 'name': RemoveSnapshot_Task, 'duration_secs': 0.541611} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.729722] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1469.729930] env[68437]: INFO nova.compute.manager [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 12.39 seconds to snapshot the instance on the hypervisor. 
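
The whole cycle above (CreateSnapshot_Task, linked-clone, HttpNfcLease export, Glance upload, Destroy_Task, RemoveSnapshot_Task, ending with "Took 12.39 seconds to snapshot") is what the driver performs for a single createBackup request. A minimal client-side sketch that would trigger it, assuming openstacksdk with a placeholder cloud entry 'devstack' and server name 'test-server':

    # Minimal sketch: request a rotating backup with openstacksdk.
    # 'devstack' and 'test-server' are placeholder names, not from this log.
    import openstack

    conn = openstack.connect(cloud='devstack')
    server = conn.compute.find_server('test-server', ignore_missing=False)

    # One createBackup call produces the snapshot/clone/upload cycle traced
    # above; rotation=2 matches the "Found N images (rotation: 2)" entries.
    conn.compute.backup_server(server, name='test-server-bak',
                               backup_type='daily', rotation=2)
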
[ 1470.266270] env[68437]: DEBUG nova.compute.manager [None req-9e41aac5-31ec-4c8f-9839-14848739f256 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Found 2 images (rotation: 2) {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1471.165464] env[68437]: DEBUG nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1471.166421] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5046380f-c47d-456b-9eb8-a8f4b9b51606 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.677498] env[68437]: INFO nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] instance snapshotting [ 1471.678181] env[68437]: DEBUG nova.objects.instance [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.184470] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d681fd-01f7-44e2-ad1d-f44996307a3a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.204807] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eef153f-45ce-4ed6-9cee-73d34a90c8ce {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.714934] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1472.715381] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4a82b55f-463f-405e-b21c-a3e0284a2c1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.723226] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1472.723226] env[68437]: value = "task-2945294" [ 1472.723226] env[68437]: _type = "Task" [ 1472.723226] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.730535] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945294, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.233188] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945294, 'name': CreateSnapshot_Task, 'duration_secs': 0.403136} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.233466] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1473.234202] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7821f860-0d4f-4a68-8171-2dc2336cfdc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.750763] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1473.751180] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0c26fb9e-9509-4a3c-afd6-477707638688 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.760466] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1473.760466] env[68437]: value = "task-2945295" [ 1473.760466] env[68437]: _type = "Task" [ 1473.760466] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.767950] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945295, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.271273] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945295, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.771317] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945295, 'name': CloneVM_Task, 'duration_secs': 0.94834} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.771743] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Created linked-clone VM from snapshot [ 1474.772610] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4ac0b6-c608-4e54-9d00-9dff6b5f8e40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.779289] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploading image fe490249-1ae1-4f7e-b22a-ef5e1d736866 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1474.799191] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1474.799191] env[68437]: value = "vm-591179" [ 1474.799191] env[68437]: _type = "VirtualMachine" [ 1474.799191] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1474.799429] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-dca65e90-f3c5-4666-8edc-c781178c6111 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.805432] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease: (returnval){ [ 1474.805432] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931a91-5bb5-5bd6-857c-3d51734f3a64" [ 1474.805432] env[68437]: _type = "HttpNfcLease" [ 1474.805432] env[68437]: } obtained for exporting VM: (result){ [ 1474.805432] env[68437]: value = "vm-591179" [ 1474.805432] env[68437]: _type = "VirtualMachine" [ 1474.805432] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1474.805701] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the lease: (returnval){ [ 1474.805701] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931a91-5bb5-5bd6-857c-3d51734f3a64" [ 1474.805701] env[68437]: _type = "HttpNfcLease" [ 1474.805701] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1474.811144] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1474.811144] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931a91-5bb5-5bd6-857c-3d51734f3a64" [ 1474.811144] env[68437]: _type = "HttpNfcLease" [ 1474.811144] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1475.313532] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1475.313532] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931a91-5bb5-5bd6-857c-3d51734f3a64" [ 1475.313532] env[68437]: _type = "HttpNfcLease" [ 1475.313532] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1475.313825] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1475.313825] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52931a91-5bb5-5bd6-857c-3d51734f3a64" [ 1475.313825] env[68437]: _type = "HttpNfcLease" [ 1475.313825] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1475.314537] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80801e63-8fe1-4890-a4cf-8fbb223d0cb1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.321607] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1475.321743] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1475.406511] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c2dd86f9-0a46-47bf-9b7b-55815b016209 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.314378] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1482.315353] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182ae464-750c-4cd1-a707-1ee32dd0a0bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.321578] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk is in state: ready. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1482.321737] env[68437]: ERROR oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk due to incomplete transfer. [ 1482.321941] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-338468f3-4e26-45b2-894c-e2967fd408d3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.329445] env[68437]: DEBUG oslo_vmware.rw_handles [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525b5031-89f5-7ced-ba45-a269fe56538f/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1482.329639] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Uploaded image fe490249-1ae1-4f7e-b22a-ef5e1d736866 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1482.331892] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1482.332121] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a4f0359c-9cda-4241-81ae-8b287426e527 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.337379] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1482.337379] env[68437]: value = "task-2945297" [ 1482.337379] env[68437]: _type = "Task" [ 1482.337379] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.344408] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945297, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.846625] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945297, 'name': Destroy_Task, 'duration_secs': 0.340805} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.847032] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroyed the VM [ 1482.847334] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1482.847638] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-91e5001f-1601-4814-b787-9dbde91e1421 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.855057] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1482.855057] env[68437]: value = "task-2945298" [ 1482.855057] env[68437]: _type = "Task" [ 1482.855057] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.862729] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945298, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.364695] env[68437]: DEBUG oslo_vmware.api [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945298, 'name': RemoveSnapshot_Task, 'duration_secs': 0.48778} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.365122] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1483.365332] env[68437]: INFO nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 11.18 seconds to snapshot the instance on the hypervisor. 
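
After this second backup finishes, the next entries show _rotate_backups finding three backup images against a rotation of 2 and deleting the oldest one. A library-agnostic sketch of that rotation rule, with the delete step left to a caller-supplied callback (function and variable names here are illustrative, not Nova's actual code):

    # Minimal sketch of backup rotation: keep the newest `rotation` images,
    # delete the rest. `images` is a list of (image_id, created_at) tuples
    # and `delete_image` is a callable supplied by the caller (illustrative).
    from datetime import datetime, timedelta, timezone

    def rotate_backups(images, rotation, delete_image):
        # Sort newest first, keep `rotation` images, delete the remainder --
        # the rule behind "Found 3 images (rotation: 2) ... Rotating out 1".
        ordered = sorted(images, key=lambda item: item[1], reverse=True)
        for image_id, _created_at in ordered[rotation:]:
            delete_image(image_id)

    now = datetime.now(timezone.utc)
    rotate_backups(
        [('img-oldest', now - timedelta(hours=2)),
         ('img-middle', now - timedelta(hours=1)),
         ('img-newest', now)],
        rotation=2,
        delete_image=lambda image_id: print('deleting', image_id),
    )
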
[ 1483.912872] env[68437]: DEBUG nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Found 3 images (rotation: 2) {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1483.913093] env[68437]: DEBUG nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Rotating out 1 backups {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1483.913258] env[68437]: DEBUG nova.compute.manager [None req-6e12473a-e3b9-4e0e-bb91-dab42621d6b1 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleting image 8909f9f6-3e9a-4327-bcd6-dd05b842bb35 {{(pid=68437) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1486.063130] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.063513] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1486.063635] env[68437]: DEBUG nova.compute.manager [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1486.064566] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081c15fa-e815-437b-8306-37372488e03f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.071351] env[68437]: DEBUG nova.compute.manager [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1486.071900] env[68437]: DEBUG nova.objects.instance [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1487.079182] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.079559] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf66a118-3b02-4e1e-8e42-3b11671063c4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.087337] env[68437]: DEBUG oslo_vmware.api [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1487.087337] env[68437]: value = "task-2945299" [ 1487.087337] env[68437]: _type = "Task" [ 1487.087337] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.095090] env[68437]: DEBUG oslo_vmware.api [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945299, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.597347] env[68437]: DEBUG oslo_vmware.api [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945299, 'name': PowerOffVM_Task, 'duration_secs': 0.174768} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.597622] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1487.597816] env[68437]: DEBUG nova.compute.manager [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1487.598560] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda4ee1f-1018-40ab-bf52-0b86cdf3432f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.109472] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c9d858e5-a1ea-4afa-93d3-7aa71d4b4097 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.266283] env[68437]: DEBUG nova.compute.manager [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Stashing vm_state: stopped {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} 
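
The entries above show the instance being powered off by do_stop_instance and the resize path stashing vm_state before taking a resize claim. A minimal sketch of the equivalent client calls, again assuming openstacksdk with placeholder cloud, server, and flavor names:

    # Minimal sketch: stop a server, then resize it, mirroring the
    # stop_instance -> resize_claim -> "Migrating" sequence in the log.
    # 'devstack', 'test-server' and 'm1.small' are placeholder names.
    import openstack

    conn = openstack.connect(cloud='devstack')
    server = conn.compute.find_server('test-server', ignore_missing=False)

    conn.compute.stop_server(server)
    server = conn.compute.wait_for_server(server, status='SHUTOFF', wait=300)

    flavor = conn.compute.find_flavor('m1.small', ignore_missing=False)
    # resize_server starts the claim/migration seen below; once the server
    # reaches VERIFY_RESIZE the resize must be confirmed or reverted.
    conn.compute.resize_server(server, flavor.id)
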
[ 1489.782932] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1489.783236] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1490.287825] env[68437]: INFO nova.compute.claims [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1490.793975] env[68437]: INFO nova.compute.resource_tracker [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating resource usage from migration cc72781e-67c7-4fed-a701-f72a2c4a65d7 [ 1490.843893] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab99dddf-b454-4255-b7a4-ae087d87f1c3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.851154] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9ccb0c-3706-4a8b-8e4d-03c7200d7966 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.881396] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b07acf3-bb68-4b46-9832-98304dc3adb0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.888444] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d4fc35-ca7d-403e-85cf-b03b32dc3a90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.901407] env[68437]: DEBUG nova.compute.provider_tree [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1491.404785] env[68437]: DEBUG nova.scheduler.client.report [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1491.910455] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.127s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1491.910731] env[68437]: INFO nova.compute.manager [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Migrating [ 1492.425447] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.425815] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1492.425866] env[68437]: DEBUG nova.network.neutron [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1493.154573] env[68437]: DEBUG nova.network.neutron [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.658066] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1495.173111] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2b5ff8-009b-4e17-8691-9ab46fa8cd91 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.193116] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1495.699333] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1495.699645] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a99a9df4-7a2b-4672-a87a-180134e5135d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.707490] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1495.707490] env[68437]: value = "task-2945300" [ 1495.707490] env[68437]: _type = "Task" [ 1495.707490] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.714750] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945300, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.218191] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1496.218454] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1496.725193] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1496.725449] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.725585] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1496.725774] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.725973] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1496.726159] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1496.726375] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1496.726537] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1496.726703] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1496.726868] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1496.727053] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1496.732040] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fa9171e-aea0-4b82-8113-1edc5b93a980 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.748019] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1496.748019] env[68437]: value = "task-2945301" [ 1496.748019] env[68437]: _type = "Task" [ 1496.748019] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.757658] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945301, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.258110] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945301, 'name': ReconfigVM_Task, 'duration_secs': 0.143594} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.258442] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1497.764704] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1497.765049] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1497.765148] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1497.765307] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1497.765455] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1497.765601] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1497.765803] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1497.765975] env[68437]: DEBUG nova.virt.hardware [None 
req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1497.766167] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1497.766325] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1497.766494] env[68437]: DEBUG nova.virt.hardware [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1497.771741] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfiguring VM instance instance-00000079 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1497.772030] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55c0560f-460f-4b13-8d51-4bd6321de30f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.789727] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1497.789727] env[68437]: value = "task-2945302" [ 1497.789727] env[68437]: _type = "Task" [ 1497.789727] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.797357] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.299663] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945302, 'name': ReconfigVM_Task, 'duration_secs': 0.198354} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.299936] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfigured VM instance instance-00000079 to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1498.300702] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d7456f-5a7c-4a22-9c2a-c6dd0b755088 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.322379] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.322635] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc58e07d-99b1-42b0-8bfd-54abdf374ad5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.340176] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1498.340176] env[68437]: value = "task-2945303" [ 1498.340176] env[68437]: _type = "Task" [ 1498.340176] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.348753] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945303, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.850253] env[68437]: DEBUG oslo_vmware.api [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945303, 'name': ReconfigVM_Task, 'duration_secs': 0.265892} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.850524] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78/8b80927c-1cda-4652-8c2e-df39c93bae78.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.850788] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1499.358768] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a7edbf-af17-4c22-a3e2-379c3a7227bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.376646] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c9bb8b-5376-4655-a41c-9b9a97b149f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.393661] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1499.932392] env[68437]: DEBUG nova.network.neutron [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Port 69e7c375-5473-4c45-822e-b368dafd9ff9 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1500.636071] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.636486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.956222] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring 
lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1500.956222] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1500.956222] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1501.139353] env[68437]: INFO nova.compute.manager [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Detaching volume a27e27ce-bc3e-4c32-8ecf-992153ab6609 [ 1501.168559] env[68437]: INFO nova.virt.block_device [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Attempting to driver detach volume a27e27ce-bc3e-4c32-8ecf-992153ab6609 from mountpoint /dev/sdb [ 1501.168797] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1501.168985] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591175', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'name': 'volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95b8784f-89e4-4ca3-b852-db9417e5b8b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'serial': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1501.169875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf78e75-1591-4d7e-ae58-b7bc401f2907 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.190857] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841c9fa1-5ad5-41b7-ab2c-0a9635d55f0c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.197492] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d59b074-54f0-40ac-a00d-92efb6af9ffe {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.216924] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24cdc0e-d95a-4396-ad9c-f7345fadc4d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.230529] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] The volume has not been displaced from its original location: [datastore1] volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609/volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1501.235572] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1501.235810] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfb84279-6974-4fce-9d6b-3914b7bd44d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.252174] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1501.252174] env[68437]: value = "task-2945304" [ 1501.252174] env[68437]: _type = "Task" [ 1501.252174] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.259138] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.761611] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945304, 'name': ReconfigVM_Task, 'duration_secs': 0.225609} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.761952] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1501.766356] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1297e61-99c0-4690-a5ca-ad00524f8e38 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.780368] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1501.780368] env[68437]: value = "task-2945305" [ 1501.780368] env[68437]: _type = "Task" [ 1501.780368] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.787776] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945305, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.989527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.989607] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1501.989751] env[68437]: DEBUG nova.network.neutron [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1502.290204] env[68437]: DEBUG oslo_vmware.api [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945305, 'name': ReconfigVM_Task, 'duration_secs': 0.134261} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.290535] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591175', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'name': 'volume-a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95b8784f-89e4-4ca3-b852-db9417e5b8b8', 'attached_at': '', 'detached_at': '', 'volume_id': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609', 'serial': 'a27e27ce-bc3e-4c32-8ecf-992153ab6609'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1502.683025] env[68437]: DEBUG nova.network.neutron [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.828884] env[68437]: DEBUG nova.objects.instance [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1503.185765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.710506] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63adc283-254e-4728-af1d-cbb69b9eb146 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.729241] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244f5beb-19ce-4348-938e-6fda2b9e0d7c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.735855] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1503.800517] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1503.839930] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ee2eff56-e134-43ce-9d64-fa6bad34201c tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.203s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1503.840254] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.040s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1503.840460] env[68437]: DEBUG nova.compute.manager [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1503.841487] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9954bf0e-c3ce-4eb7-b4f4-40a3f5d22375 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.848276] env[68437]: DEBUG nova.compute.manager [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68437) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1503.848840] env[68437]: DEBUG nova.objects.instance [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.242363] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1ccb56-e909-493c-9302-a6890d43b33b tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance '8b80927c-1cda-4652-8c2e-df39c93bae78' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1504.854796] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1504.855155] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-810ea572-71c0-40ce-80cd-f4934dc8d3fa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.862724] env[68437]: DEBUG oslo_vmware.api [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1504.862724] env[68437]: value = "task-2945306" [ 1504.862724] env[68437]: _type = "Task" [ 1504.862724] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.870409] env[68437]: DEBUG oslo_vmware.api [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945306, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.371761] env[68437]: DEBUG oslo_vmware.api [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945306, 'name': PowerOffVM_Task, 'duration_secs': 0.195007} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.372085] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1505.372216] env[68437]: DEBUG nova.compute.manager [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1505.372941] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2105e17-8db8-4060-b688-cbab093063d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.883431] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f6de385c-45c1-4d48-947b-7470016474ec tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.043s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1506.132707] env[68437]: DEBUG nova.objects.instance [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1506.552201] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1506.552433] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1506.552608] env[68437]: DEBUG nova.compute.manager [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Going to confirm migration 9 {{(pid=68437) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1506.637334] 
env[68437]: DEBUG oslo_concurrency.lockutils [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.637541] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1506.637657] env[68437]: DEBUG nova.network.neutron [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1506.637830] env[68437]: DEBUG nova.objects.instance [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'info_cache' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1507.112801] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1507.113027] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1507.113234] env[68437]: DEBUG nova.network.neutron [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1507.113456] env[68437]: DEBUG nova.objects.instance [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'info_cache' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1507.140813] env[68437]: DEBUG nova.objects.base [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Object Instance<95b8784f-89e4-4ca3-b852-db9417e5b8b8> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1507.828417] env[68437]: DEBUG nova.network.neutron [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 
95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [{"id": "289592f8-bb26-4f97-ac37-15183e5f59e2", "address": "fa:16:3e:de:5d:4f", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap289592f8-bb", "ovs_interfaceid": "289592f8-bb26-4f97-ac37-15183e5f59e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.308377] env[68437]: DEBUG nova.network.neutron [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.330490] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "refresh_cache-95b8784f-89e4-4ca3-b852-db9417e5b8b8" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1508.811259] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1508.811587] env[68437]: DEBUG nova.objects.instance [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'migration_context' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1509.314334] env[68437]: DEBUG nova.objects.base [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Object Instance<8b80927c-1cda-4652-8c2e-df39c93bae78> lazy-loaded attributes: info_cache,migration_context {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1509.315805] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb1acc2-502f-4cbb-8595-939b5778609c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.334328] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1509.335135] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31ee943e-26d0-4962-ab3c-7ac9d60a40c1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.336616] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c10f569-c212-4b2c-8724-04a8cac2aa73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.341529] env[68437]: DEBUG oslo_vmware.api [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1509.341529] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]525b83c5-9bcf-87ae-7988-66611099c524" [ 1509.341529] env[68437]: _type = "Task" [ 1509.341529] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.345411] env[68437]: DEBUG oslo_vmware.api [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1509.345411] env[68437]: value = "task-2945307" [ 1509.345411] env[68437]: _type = "Task" [ 1509.345411] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.352397] env[68437]: DEBUG oslo_vmware.api [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]525b83c5-9bcf-87ae-7988-66611099c524, 'name': SearchDatastore_Task, 'duration_secs': 0.00886} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.355160] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1509.355407] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1509.356983] env[68437]: DEBUG oslo_vmware.api [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.856177] env[68437]: DEBUG oslo_vmware.api [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945307, 'name': PowerOnVM_Task, 'duration_secs': 0.398527} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.856463] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1509.856660] env[68437]: DEBUG nova.compute.manager [None req-3ebc485f-dc46-4e51-b6dc-577909e3057f tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1509.857422] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f5fddd-1279-447f-aa34-00fec938badd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.915918] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ad632b-6507-4124-ac0a-d11d5a0bdca9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.922954] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769e7f90-c8be-4cbe-93a5-0f7d871a246b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.953890] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04964680-b3b4-4454-a19b-d7e0092038e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.960945] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bac0b8-7229-4af0-aee3-b2760549898c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.973781] env[68437]: DEBUG nova.compute.provider_tree [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.476989] env[68437]: DEBUG nova.scheduler.client.report [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1511.487600] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.132s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1511.488040] env[68437]: DEBUG nova.compute.manager [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=68437) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1511.937390] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1511.937685] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1511.937832] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1512.042662] env[68437]: INFO nova.scheduler.client.report [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted allocation for migration cc72781e-67c7-4fed-a701-f72a2c4a65d7 [ 1512.231920] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.256190] env[68437]: DEBUG nova.objects.instance [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1512.548655] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b940b7-b870-477b-8d48-1a3380beb80c tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 5.996s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1512.761052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.761280] env[68437]: DEBUG oslo_concurrency.lockutils [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1512.761395] env[68437]: DEBUG nova.network.neutron [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1512.761571] env[68437]: DEBUG nova.objects.instance [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'info_cache' on Instance uuid 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1513.265052] env[68437]: DEBUG nova.objects.base [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Object Instance<8b80927c-1cda-4652-8c2e-df39c93bae78> lazy-loaded attributes: flavor,info_cache {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1513.962889] env[68437]: DEBUG nova.network.neutron [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [{"id": "69e7c375-5473-4c45-822e-b368dafd9ff9", "address": "fa:16:3e:47:3c:79", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69e7c375-54", "ovs_interfaceid": "69e7c375-5473-4c45-822e-b368dafd9ff9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.230899] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.465540] env[68437]: DEBUG oslo_concurrency.lockutils [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-8b80927c-1cda-4652-8c2e-df39c93bae78" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1515.226194] env[68437]: DEBUG 
oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.230982] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.472379] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1515.472721] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c306c692-cba3-42b9-9dfd-6eb14340e9d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.480635] env[68437]: DEBUG oslo_vmware.api [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1515.480635] env[68437]: value = "task-2945308" [ 1515.480635] env[68437]: _type = "Task" [ 1515.480635] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.488229] env[68437]: DEBUG oslo_vmware.api [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945308, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.991059] env[68437]: DEBUG oslo_vmware.api [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945308, 'name': PowerOnVM_Task, 'duration_secs': 0.360208} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.991265] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.991482] env[68437]: DEBUG nova.compute.manager [None req-65530c86-e932-412a-8d5a-49a5b176f78f tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1515.992240] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c83c01d1-f0fc-4ee9-8e95-e3a3fe7643f2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.733312] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.733724] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.733724] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.733850] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1516.810680] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.810942] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.811186] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1516.811373] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1516.811545] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1516.813504] env[68437]: INFO nova.compute.manager [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Terminating instance [ 1517.241466] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] There are 20 instances to clean {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1517.241679] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: b85083c1-6b10-4bd4-8bf4-a23e961863f0] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1517.317058] env[68437]: DEBUG nova.compute.manager [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1517.317295] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.318188] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d13a5b8-bba6-4d24-a9c1-34e75c7f771f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.325975] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.326215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01b833a9-12c2-46d5-a3fb-de4fc26681ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.332505] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1517.332505] env[68437]: value = "task-2945309" [ 1517.332505] env[68437]: _type = "Task" [ 1517.332505] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.339852] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945309, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.745452] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 88191506-b278-4502-b72d-07169f4fd6a6] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1517.841526] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945309, 'name': PowerOffVM_Task, 'duration_secs': 0.174694} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.841795] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1517.841965] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1517.842222] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7069b58f-1372-49e1-bf26-da5fa9c55d4e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.903481] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1517.903700] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1517.903886] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleting the datastore file [datastore1] 8b80927c-1cda-4652-8c2e-df39c93bae78 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1517.904160] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cb2102d-5e8d-4a12-a8a8-06407550af90 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.910449] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1517.910449] env[68437]: value = "task-2945311" [ 1517.910449] env[68437]: _type = "Task" [ 1517.910449] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.917709] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.249317] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 3562f6dc-2596-4878-96f5-1e0da54a168b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1518.419786] env[68437]: DEBUG oslo_vmware.api [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139615} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.420068] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1518.420261] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1518.420442] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1518.420617] env[68437]: INFO nova.compute.manager [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1518.420885] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1518.421094] env[68437]: DEBUG nova.compute.manager [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1518.421191] env[68437]: DEBUG nova.network.neutron [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1518.753202] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 6d4f617c-97d6-4721-97c0-7a6b8676d681] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1518.851325] env[68437]: DEBUG nova.compute.manager [req-efbc6cf7-41d7-4a56-97a3-9b2bf0646bd7 req-c1c402e6-f7a5-4e63-8a7a-c0933caa86d7 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Received event network-vif-deleted-69e7c375-5473-4c45-822e-b368dafd9ff9 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1518.851325] env[68437]: INFO nova.compute.manager [req-efbc6cf7-41d7-4a56-97a3-9b2bf0646bd7 req-c1c402e6-f7a5-4e63-8a7a-c0933caa86d7 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Neutron deleted interface 69e7c375-5473-4c45-822e-b368dafd9ff9; detaching it from the instance and deleting it from the info cache [ 1518.851325] env[68437]: DEBUG nova.network.neutron [req-efbc6cf7-41d7-4a56-97a3-9b2bf0646bd7 req-c1c402e6-f7a5-4e63-8a7a-c0933caa86d7 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.256127] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2ac0c165-a898-4d23-a346-2567921caf1b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1519.332752] env[68437]: DEBUG nova.network.neutron [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.353893] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd8e50f1-ad46-4a7b-84f0-e9d6c57f7198 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.364086] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c30b0d-116f-4576-ae5d-e77e6cc6adcf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.389688] env[68437]: DEBUG nova.compute.manager [req-efbc6cf7-41d7-4a56-97a3-9b2bf0646bd7 req-c1c402e6-f7a5-4e63-8a7a-c0933caa86d7 service nova] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Detach interface failed, port_id=69e7c375-5473-4c45-822e-b368dafd9ff9, reason: Instance 8b80927c-1cda-4652-8c2e-df39c93bae78 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1519.760030] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 7705f1c5-3b96-426c-9553-b67f2951825b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1519.835136] env[68437]: INFO nova.compute.manager [-] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Took 1.41 seconds to deallocate network for instance. [ 1520.263302] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0a246b14-5078-4549-a270-73f99a1647c7] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1520.342181] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1520.342497] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1520.342702] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1520.396553] env[68437]: INFO nova.scheduler.client.report [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted allocations for instance 8b80927c-1cda-4652-8c2e-df39c93bae78 [ 1520.766832] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: eb5c9d63-ac51-4cef-93c9-c15119ea2ea7] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1520.904200] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b0c47a84-f2ae-4010-a39c-d0dcc03403f8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "8b80927c-1cda-4652-8c2e-df39c93bae78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.093s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1521.269728] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 524c1b76-3563-482d-a676-26fa6c28a3c7] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1521.673185] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1521.673415] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1521.773340] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: ede62837-4ff5-44be-a015-9ea06b9126a5] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1522.175535] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1522.275865] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 55076395-fd1d-48a7-ab85-fe0eb03afa19] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1522.696884] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1522.697217] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1522.698820] env[68437]: INFO nova.compute.claims [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.779168] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 7a05d783-afac-43a1-a715-c83b42c990c2] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1523.281656] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 8554a78c-c2d7-459d-a295-121da777dfd4] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1523.743389] env[68437]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08daf23-2d2f-4467-b467-0adc6ada172c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.751507] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe74de4-7677-4d49-894c-d2f139bbf13c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.783739] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b07f43b-9bf6-471f-9277-448e6cd82f73 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.786058] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 00b76648-d27b-4002-80cb-366e64c32ecc] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1523.792514] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1535a9-1b79-41f4-bae0-c34ddef84762 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.805516] env[68437]: DEBUG nova.compute.provider_tree [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.289322] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: e81e633d-34a6-443d-a2fe-95e6d8afa552] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1524.308965] env[68437]: DEBUG nova.scheduler.client.report [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1524.793163] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: aff861ed-e792-480a-811e-c157c0606d08] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1524.814052] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1524.814569] env[68437]: DEBUG nova.compute.manager [None 
req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1525.296051] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 191b441c-2c9f-48f9-b83a-d539722e6375] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1525.320565] env[68437]: DEBUG nova.compute.utils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1525.321914] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1525.322101] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1525.376206] env[68437]: DEBUG nova.policy [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5f65dac5fd04c59b33cb454ee1c3e2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa08e0c3081143cbb8f4d00d7e5cf222', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1525.634558] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Successfully created port: fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.798975] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: e2143e07-8c8d-4008-bb73-29aae91baee7] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1525.825668] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1526.302226] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 353ebb37-7e69-49d4-873e-2272cbfff6e8] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1526.805921] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 8ccd7176-55c0-4118-a07e-3c4bdbba9795] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1526.835131] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1526.861055] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1526.861325] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.861488] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1526.861680] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.861831] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1526.861979] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1526.862208] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1526.862369] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1526.862539] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1526.862705] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1526.862881] env[68437]: DEBUG nova.virt.hardware [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1526.863987] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30257e85-0d3f-4a46-95ca-6da57875e53d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.872572] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6884214d-fe14-4e0b-9f5f-da4addf0435a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.972113] env[68437]: DEBUG nova.compute.manager [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Received event network-vif-plugged-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1526.972354] env[68437]: DEBUG oslo_concurrency.lockutils [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1526.972592] env[68437]: DEBUG oslo_concurrency.lockutils [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1526.972742] env[68437]: DEBUG oslo_concurrency.lockutils [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1526.972899] env[68437]: DEBUG nova.compute.manager [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] No waiting events found dispatching network-vif-plugged-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1526.973081] env[68437]: WARNING nova.compute.manager [req-5a696a35-03e6-48f3-9cd5-93a54af0ee79 req-f92ac196-d8a3-4400-9839-9321c0aaa87d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Received unexpected event network-vif-plugged-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 for instance with vm_state building and task_state spawning. [ 1527.048265] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Successfully updated port: fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.309393] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.309511] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances with incomplete migration {{(pid=68437) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1527.551450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.551497] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1527.551723] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1528.084013] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 
0f078ed3-d253-4bc4-901c-3c84027392b4] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1528.213205] env[68437]: DEBUG nova.network.neutron [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.308713] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.715779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1528.716147] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Instance network_info: |[{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": 
"nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1528.716601] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:4a:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcbc43b1-2f65-489a-a9b4-5b1fa5f71458', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1528.724232] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1528.724453] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1528.724684] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5014b8bf-293e-49c9-ae6a-8497bdd946e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.744050] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1528.744050] env[68437]: value = "task-2945312" [ 1528.744050] env[68437]: _type = "Task" [ 1528.744050] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.751327] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945312, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.811784] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1528.812112] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1528.812346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1528.812552] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1528.813589] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22bbfd9-bb1f-48ff-9155-953a0124d0b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.822285] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380f09ed-45e0-4e75-ba63-365502035faa {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.836634] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8449c9ac-19ba-4e30-9e0e-5045ff1d33f4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.842770] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968c8646-57d5-4c74-a120-9a3c602898c9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.873294] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180728MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1528.873486] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1528.873725] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1528.997627] env[68437]: DEBUG nova.compute.manager [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Received event network-changed-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1528.997892] env[68437]: DEBUG nova.compute.manager [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Refreshing instance network info cache due to event network-changed-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1528.998111] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.998257] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1528.998416] env[68437]: DEBUG nova.network.neutron [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Refreshing network info cache for port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1529.254797] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945312, 'name': CreateVM_Task, 'duration_secs': 0.301004} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.255189] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1529.255672] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.255844] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1529.256204] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1529.256460] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80fdb87b-fe53-4c2e-bd44-2f1bfc216694 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.260727] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1529.260727] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]523cc52b-f754-2e30-ffd0-37dc90651ae7" [ 1529.260727] env[68437]: _type = "Task" [ 1529.260727] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.268034] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523cc52b-f754-2e30-ffd0-37dc90651ae7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.676081] env[68437]: DEBUG nova.network.neutron [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updated VIF entry in instance network info cache for port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1529.676477] env[68437]: DEBUG nova.network.neutron [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.770918] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]523cc52b-f754-2e30-ffd0-37dc90651ae7, 'name': SearchDatastore_Task, 'duration_secs': 0.01275} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.771228] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1529.771459] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1529.771691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.771832] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1529.772016] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1529.772265] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6e2d766-8275-4d35-9fc1-4df6a43dd484 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.780253] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1529.780382] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Folder [datastore1] devstack-image-cache_base created. 
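The SearchDatastore_Task / MakeDirectory sequence above is the image-cache lookup: the driver takes a lock named after the cached image path ([datastore1] devstack-image-cache_base/a272f526-...), checks whether the cached VMDK already exists, and creates the cache directory when needed before later copying it into the instance folder. A rough sketch of that fetch-if-missing-under-a-per-image-lock pattern, with exists() and fetch() as placeholders for the datastore search and the image download (none of this is the actual Nova/oslo.vmware code):

    import threading
    from contextlib import contextmanager

    _image_locks = {}                 # one lock per cache path, like the named lockutils locks above
    _registry_lock = threading.Lock()

    @contextmanager
    def image_cache_lock(cache_path):
        """Serialize work on a single cached image, analogous to the
        '[datastore1] devstack-image-cache_base/<image-id>' locks in the log."""
        with _registry_lock:
            lock = _image_locks.setdefault(cache_path, threading.Lock())
        with lock:
            yield

    def ensure_cached_image(cache_path, exists, fetch):
        """exists() and fetch() stand in for the SearchDatastore_Task lookup
        and the image transfer; both are placeholders."""
        with image_cache_lock(cache_path):
            if not exists(cache_path):     # cache miss: create the folder and fetch once
                fetch(cache_path)
            return cache_path              # later copied into the instance directory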
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1529.781043] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f220a24-2c52-4591-896c-d488e6ad531d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.785609] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1529.785609] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526196ba-d15f-71e4-e51a-0d426e3970f8" [ 1529.785609] env[68437]: _type = "Task" [ 1529.785609] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.792326] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526196ba-d15f-71e4-e51a-0d426e3970f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.897724] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 95b8784f-89e4-4ca3-b852-db9417e5b8b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.897879] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 0f078ed3-d253-4bc4-901c-3c84027392b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.898066] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1529.898212] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1529.932888] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbb1f3a-f935-41da-893a-79f721e31e37 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.940423] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8060946e-716d-455a-a181-3e2905ded97a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.970371] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243dbaa9-9949-4bcc-b0a8-2b289ffd6de1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.976991] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e841d7-3152-4407-9f15-0ea4936dd1b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.989403] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.179764] env[68437]: DEBUG oslo_concurrency.lockutils [req-8fdec4f2-877f-495e-bcf5-13fad963003d req-70efabc1-bdc4-474c-861b-a4a0a500aadd service nova] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1530.295802] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526196ba-d15f-71e4-e51a-0d426e3970f8, 'name': SearchDatastore_Task, 'duration_secs': 0.008281} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.296591] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3351b82c-41de-42f1-9222-186ab0a6ea55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.301549] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1530.301549] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0e88a-22f8-6b0c-11ed-faa1b569393d" [ 1530.301549] env[68437]: _type = "Task" [ 1530.301549] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.309708] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0e88a-22f8-6b0c-11ed-faa1b569393d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.493067] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1530.812081] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e0e88a-22f8-6b0c-11ed-faa1b569393d, 'name': SearchDatastore_Task, 'duration_secs': 0.010135} completed successfully. 
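The resource-tracker audit interleaved with this spawn can be reproduced from the numbers logged above: two tracked instances each holding {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, plus the 512 MB MEMORY_MB reservation from the reported inventory, give exactly the "Final resource view" figures used_ram=896MB, used_disk=2GB, used_vcpus=2. A standalone check of that arithmetic (not the ResourceTracker code itself):

    RESERVED_MEMORY_MB = 512  # 'reserved' for MEMORY_MB in the inventory above

    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # instance 95b8784f-...
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},  # instance 0f078ed3-...
    ]

    used_ram_mb = RESERVED_MEMORY_MB + sum(a['MEMORY_MB'] for a in allocations)
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)
    used_vcpus = sum(a['VCPU'] for a in allocations)

    assert (used_ram_mb, used_disk_gb, used_vcpus) == (896, 2, 2)
    print(f"used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB used_vcpus={used_vcpus}")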
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.812365] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1530.812627] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1530.812879] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac6ddb41-0418-4127-88e7-a7a47d5c0ad1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.820260] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1530.820260] env[68437]: value = "task-2945313" [ 1530.820260] env[68437]: _type = "Task" [ 1530.820260] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.827366] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.997603] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1530.997814] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.124s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1531.330640] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449126} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.330933] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.331229] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.331571] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6db6675-20c1-48ba-9297-2e4bf6b9b5a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.337434] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1531.337434] env[68437]: value = "task-2945314" [ 1531.337434] env[68437]: _type = "Task" [ 1531.337434] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.346615] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945314, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.846811] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059104} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.847119] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1531.847890] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8ea997-09c1-424c-a584-4db9c6505bc6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.869266] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1531.869517] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c0302b1-e317-498f-9491-3ab27dbdb997 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.887997] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1531.887997] env[68437]: value = "task-2945315" [ 1531.887997] env[68437]: _type = "Task" [ 1531.887997] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.895503] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945315, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.397668] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945315, 'name': ReconfigVM_Task, 'duration_secs': 0.289583} completed successfully. 
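"Extending root virtual disk to 1048576" presumably refers to the flavor's 1 GiB root disk (DISK_GB: 1 in the allocations above) expressed in KiB; a one-line sanity check of that assumption:

    KIB_PER_GIB = 1024 * 1024

    def root_disk_size_kib(root_gb):
        # assumed unit conversion for the ExtendVirtualDisk size in the log
        return root_gb * KIB_PER_GIB

    assert root_disk_size_kib(1) == 1048576  # matches "Extending root virtual disk to 1048576"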
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.398042] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.398598] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e56bf836-d89a-49b3-872f-d8f186d6f237 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.404678] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1532.404678] env[68437]: value = "task-2945316" [ 1532.404678] env[68437]: _type = "Task" [ 1532.404678] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.412010] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945316, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.915125] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945316, 'name': Rename_Task, 'duration_secs': 0.142151} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.915408] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1532.915648] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45132281-477c-4a14-8cf7-9da46f53304c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.921999] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1532.921999] env[68437]: value = "task-2945317" [ 1532.921999] env[68437]: _type = "Task" [ 1532.921999] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.929463] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945317, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.431765] env[68437]: DEBUG oslo_vmware.api [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945317, 'name': PowerOnVM_Task, 'duration_secs': 0.459686} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.432155] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1533.432284] env[68437]: INFO nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1533.432480] env[68437]: DEBUG nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1533.433287] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f14a321-47d9-4fe8-81bd-c6dbeda22491 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.950702] env[68437]: INFO nova.compute.manager [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Took 11.27 seconds to build instance. [ 1534.453395] env[68437]: DEBUG oslo_concurrency.lockutils [None req-f08617a8-c798-4649-8fcc-9b6bbc4c4ab8 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.780s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1534.800204] env[68437]: DEBUG nova.compute.manager [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Received event network-changed-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1534.800366] env[68437]: DEBUG nova.compute.manager [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Refreshing instance network info cache due to event network-changed-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458. 
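The recurring "acquired ... waited N.NNNs" / "released ... held N.NNNs" pairs (for example the build lock held 12.780s just above) are produced by a wrapper that times both the wait for and the holding of a named lock. A generic stand-in for that wrapper, not oslo.concurrency's actual lockutils implementation:

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str, owner: str):
        """Report how long the caller waited for, and then held, a named lock,
        in the spirit of the lockutils DEBUG lines in this log."""
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')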
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1534.800599] env[68437]: DEBUG oslo_concurrency.lockutils [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.800744] env[68437]: DEBUG oslo_concurrency.lockutils [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1534.800908] env[68437]: DEBUG nova.network.neutron [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Refreshing network info cache for port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1535.498127] env[68437]: DEBUG nova.network.neutron [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updated VIF entry in instance network info cache for port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1535.498512] env[68437]: DEBUG nova.network.neutron [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.001510] env[68437]: DEBUG oslo_concurrency.lockutils [req-d82ac25f-1c3b-4aee-b8d7-fdb5cf577479 req-ea9436ce-3637-4b52-befc-130b8636136d service nova] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1546.271064] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 
tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1546.271364] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1546.271950] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1546.272158] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1546.272336] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1546.274562] env[68437]: INFO nova.compute.manager [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Terminating instance [ 1546.778026] env[68437]: DEBUG nova.compute.manager [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1546.778306] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1546.779264] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8426d2c-54b3-4dd6-9d0f-4a4fb20f1573 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.787186] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1546.787406] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-355075f1-5c4b-406a-aa6d-8024cb78bb8c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.793055] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1546.793055] env[68437]: value = "task-2945318" [ 1546.793055] env[68437]: _type = "Task" [ 1546.793055] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.800396] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.303402] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945318, 'name': PowerOffVM_Task, 'duration_secs': 0.171399} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.303807] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1547.303807] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1547.304057] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db533838-6021-4fb9-a69b-48f7db335652 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.366446] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1547.366665] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1547.366839] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleting the datastore file [datastore1] 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1547.367113] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3bea63a-6f16-425f-8423-7a3f2960e0ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.372688] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1547.372688] env[68437]: value = "task-2945320" [ 1547.372688] env[68437]: _type = "Task" [ 1547.372688] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.380306] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945320, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.882963] env[68437]: DEBUG oslo_vmware.api [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144399} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.883332] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1547.883519] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1547.883698] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1547.883872] env[68437]: INFO nova.compute.manager [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1547.884133] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1547.884337] env[68437]: DEBUG nova.compute.manager [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1547.884451] env[68437]: DEBUG nova.network.neutron [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1548.310521] env[68437]: DEBUG nova.compute.manager [req-43b48a51-3b08-4034-98f2-6010cf247814 req-7bdf9df8-65c9-490c-a7fd-36c889d5b403 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Received event network-vif-deleted-289592f8-bb26-4f97-ac37-15183e5f59e2 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1548.310881] env[68437]: INFO nova.compute.manager [req-43b48a51-3b08-4034-98f2-6010cf247814 req-7bdf9df8-65c9-490c-a7fd-36c889d5b403 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Neutron deleted interface 289592f8-bb26-4f97-ac37-15183e5f59e2; detaching it from the instance and deleting it from the info cache [ 1548.310881] env[68437]: DEBUG nova.network.neutron [req-43b48a51-3b08-4034-98f2-6010cf247814 req-7bdf9df8-65c9-490c-a7fd-36c889d5b403 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.791959] env[68437]: DEBUG nova.network.neutron [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.813648] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40b0daa8-e152-4ea5-a81a-d2823d32c65e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.823702] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f568621-49f9-49be-a648-05fb4924d1ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.849817] env[68437]: DEBUG nova.compute.manager [req-43b48a51-3b08-4034-98f2-6010cf247814 req-7bdf9df8-65c9-490c-a7fd-36c889d5b403 service nova] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Detach interface failed, port_id=289592f8-bb26-4f97-ac37-15183e5f59e2, reason: Instance 95b8784f-89e4-4ca3-b852-db9417e5b8b8 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1549.294614] env[68437]: INFO nova.compute.manager [-] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Took 1.41 seconds to deallocate network for instance. 
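The teardown of instance 95b8784f-... above follows a fixed ordering: power the VM off, unregister it, delete its datastore directory, then deallocate the Neutron port; the later "Detach interface failed ... could not be found" message is the benign race with Neutron having already deleted the port. A schematic of that ordering, with every step a placeholder callable rather than a real driver method:

    def destroy_instance(vm, steps):
        """steps is assumed to expose power_off/unregister/delete_datastore_files/
        deallocate_network placeholders mirroring the PowerOffVM_Task, UnregisterVM,
        DeleteDatastoreFile_Task and deallocate_for_instance() calls in the log."""
        steps.power_off(vm)               # PowerOffVM_Task
        steps.unregister(vm)              # VirtualMachine.UnregisterVM
        steps.delete_datastore_files(vm)  # DeleteDatastoreFile_Task on [datastore1] <uuid>
        steps.deallocate_network(vm)      # Neutron cleanup; the port may already be
                                          # gone if Neutron deleted it first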
[ 1549.800886] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1549.801281] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1549.801383] env[68437]: DEBUG nova.objects.instance [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'resources' on Instance uuid 95b8784f-89e4-4ca3-b852-db9417e5b8b8 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1550.344015] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e53b1c-6e67-4a46-ac34-b0555a941310 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.351534] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ca1b41-07ab-4387-974e-b8163c53bfb3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.380665] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ffef9f-3fdd-4d59-ab8a-249c5766a663 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.387092] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55eaf65-7413-4342-8481-8b0cc68d42ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.399682] env[68437]: DEBUG nova.compute.provider_tree [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.903187] env[68437]: DEBUG nova.scheduler.client.report [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1551.409216] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 
tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.608s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1551.429714] env[68437]: INFO nova.scheduler.client.report [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleted allocations for instance 95b8784f-89e4-4ca3-b852-db9417e5b8b8 [ 1551.937963] env[68437]: DEBUG oslo_concurrency.lockutils [None req-1f2095a3-3585-4f11-a05c-d93735a21032 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "95b8784f-89e4-4ca3-b852-db9417e5b8b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.666s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1554.193674] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1554.194029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1554.696720] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1555.217742] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1555.218100] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1555.219590] env[68437]: INFO nova.compute.claims [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1556.264892] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8268d5a-83b5-49f4-9e9e-715e77ef5f69 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.272433] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75082f7a-ce3a-4d01-9e24-5adf186caa49 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.302553] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4b1966-8515-48e5-8550-992548ab1d64 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.309664] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51af7148-37d9-4ca2-ab1e-08b1589ec54d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.322248] env[68437]: DEBUG nova.compute.provider_tree [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.825715] env[68437]: DEBUG nova.scheduler.client.report [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1557.331058] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1557.331621] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1557.836624] env[68437]: DEBUG nova.compute.utils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1557.838418] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1557.838594] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1557.885073] env[68437]: DEBUG nova.policy [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c41fdd9aa844ec4aef0f0d2989ea63b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '490b965164a14c9faf6b0329886d617e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1558.148547] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Successfully created port: d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1558.342509] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Start building block device mappings for instance. 
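Annotation: the inventory dict reported for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 in the entries above bounds what the scheduler may place on this node; effective capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check with the values from the log:

    def effective_capacity(total, reserved, allocation_ratio):
        # Placement's usable capacity for one resource class on a provider.
        return int((total - reserved) * allocation_ratio)

    print(effective_capacity(48, 0, 4.0))        # VCPU      -> 192     (max_unit 16 per instance)
    print(effective_capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078  (max_unit 65530)
    print(effective_capacity(400, 0, 1.0))       # DISK_GB   -> 400     (max_unit 156)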
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1559.354063] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1559.435571] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1559.435571] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1559.435803] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1559.435803] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1559.436151] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1559.436151] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1559.436281] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1559.436419] env[68437]: 
DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1559.436623] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1559.436754] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1559.436928] env[68437]: DEBUG nova.virt.hardware [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1559.437858] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd5e888-1f85-410b-8c0b-90a553c38c30 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.445833] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dc5517-6f6f-4b8d-9dfc-cc4e33e0310f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.490553] env[68437]: DEBUG nova.compute.manager [req-8687f4df-197b-4a9d-8483-e3d381a64ced req-e027e82d-736b-4726-aaea-a450833a876a service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Received event network-vif-plugged-d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1559.490553] env[68437]: DEBUG oslo_concurrency.lockutils [req-8687f4df-197b-4a9d-8483-e3d381a64ced req-e027e82d-736b-4726-aaea-a450833a876a service nova] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1559.490734] env[68437]: DEBUG oslo_concurrency.lockutils [req-8687f4df-197b-4a9d-8483-e3d381a64ced req-e027e82d-736b-4726-aaea-a450833a876a service nova] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1559.490906] env[68437]: DEBUG oslo_concurrency.lockutils [req-8687f4df-197b-4a9d-8483-e3d381a64ced req-e027e82d-736b-4726-aaea-a450833a876a service nova] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1559.491054] env[68437]: DEBUG nova.compute.manager [req-8687f4df-197b-4a9d-8483-e3d381a64ced 
req-e027e82d-736b-4726-aaea-a450833a876a service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] No waiting events found dispatching network-vif-plugged-d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1559.491223] env[68437]: WARNING nova.compute.manager [req-8687f4df-197b-4a9d-8483-e3d381a64ced req-e027e82d-736b-4726-aaea-a450833a876a service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Received unexpected event network-vif-plugged-d9010969-e902-406d-85bd-7b15f516910f for instance with vm_state building and task_state spawning. [ 1559.569107] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Successfully updated port: d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1560.073056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.073056] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1560.073056] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1560.629045] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1560.747783] env[68437]: DEBUG nova.network.neutron [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating instance_info_cache with network_info: [{"id": "d9010969-e902-406d-85bd-7b15f516910f", "address": "fa:16:3e:a2:50:cf", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9010969-e9", "ovs_interfaceid": "d9010969-e902-406d-85bd-7b15f516910f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.250438] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1561.250786] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Instance network_info: |[{"id": "d9010969-e902-406d-85bd-7b15f516910f", "address": "fa:16:3e:a2:50:cf", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9010969-e9", "ovs_interfaceid": "d9010969-e902-406d-85bd-7b15f516910f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1561.251248] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:50:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9c1b8689-a9b4-4972-beb9-6a1c8de1dc88', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9010969-e902-406d-85bd-7b15f516910f', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.258583] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1561.258798] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.259037] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d089a6ac-fcfb-40b6-b58e-910ecbb37977 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.279487] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.279487] env[68437]: value = "task-2945321" [ 1561.279487] env[68437]: _type = "Task" [ 1561.279487] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.287953] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945321, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.516558] env[68437]: DEBUG nova.compute.manager [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Received event network-changed-d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1561.516735] env[68437]: DEBUG nova.compute.manager [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Refreshing instance network info cache due to event network-changed-d9010969-e902-406d-85bd-7b15f516910f. 
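Annotation: the instance_info_cache / "Instance network_info" entries above are a list of VIF dicts; the fields the VMware driver turns into the "Instance VIF info" record (MAC, NSX logical-switch id, fixed IP, MTU) can be read straight out of that structure. A trimmed-down sketch using the values from this log:

    vif = {
        "id": "d9010969-e902-406d-85bd-7b15f516910f",
        "address": "fa:16:3e:a2:50:cf",
        "type": "ovs",
        "details": {"nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88"},
        "network": {
            "id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1",
            "meta": {"mtu": 8950},
            "subnets": [{"ips": [{"address": "192.168.128.4", "type": "fixed"}]}],
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
    # fa:16:3e:a2:50:cf ['192.168.128.4'] 8950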
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1561.516969] env[68437]: DEBUG oslo_concurrency.lockutils [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] Acquiring lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.517110] env[68437]: DEBUG oslo_concurrency.lockutils [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] Acquired lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1561.517263] env[68437]: DEBUG nova.network.neutron [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Refreshing network info cache for port d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1561.790138] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945321, 'name': CreateVM_Task, 'duration_secs': 0.302922} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.790519] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.790979] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.791162] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1561.791481] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1561.791724] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04a3cd69-05e6-445d-9471-108e9f921fe5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.796441] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1561.796441] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7c995-0321-504f-8a41-9f2c46f2bc87" [ 1561.796441] env[68437]: _type = "Task" [ 1561.796441] env[68437]: } to complete. 
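Annotation: the "Waiting for the task: (returnval){ value = task-... }" / "progress is N%" / "completed successfully" sequences above are oslo.vmware's task polling (wait_for_task / _poll_task in the source tags). A minimal sketch of that call pattern, assuming an already logged-in session and an existing VM reference; the power-on call is just an example method:

    def power_on_and_wait(session, vm_ref):
        """Invoke a vSphere task and block until vCenter reports it done.

        session: an oslo_vmware.api.VMwareAPISession (already logged in)
        vm_ref:  a VirtualMachine managed-object reference
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the TaskInfo (the "progress is N%" debug lines)
        # until the task reaches success or error, then returns the final
        # TaskInfo or raises the mapped exception on failure.
        return session.wait_for_task(task)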
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.803464] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7c995-0321-504f-8a41-9f2c46f2bc87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.196497] env[68437]: DEBUG nova.network.neutron [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updated VIF entry in instance network info cache for port d9010969-e902-406d-85bd-7b15f516910f. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1562.197031] env[68437]: DEBUG nova.network.neutron [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating instance_info_cache with network_info: [{"id": "d9010969-e902-406d-85bd-7b15f516910f", "address": "fa:16:3e:a2:50:cf", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9010969-e9", "ovs_interfaceid": "d9010969-e902-406d-85bd-7b15f516910f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.306713] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52e7c995-0321-504f-8a41-9f2c46f2bc87, 'name': SearchDatastore_Task, 'duration_secs': 0.01071} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.306961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1562.307196] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.307429] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.307605] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1562.307801] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.308815] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25ba01d1-6c53-4975-880a-d823ed4f1619 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.315953] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.316136] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.316802] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f491e42-3918-46ab-81bd-5898d9f6a430 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.321494] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1562.321494] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5255c37b-3d00-8c27-46fb-da4bbe6da78c" [ 1562.321494] env[68437]: _type = "Task" [ 1562.321494] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.328200] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5255c37b-3d00-8c27-46fb-da4bbe6da78c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.699842] env[68437]: DEBUG oslo_concurrency.lockutils [req-11e47f65-4b56-4a40-8a7a-9f87c47928a3 req-eb18d1fa-3f57-448f-a8c3-02b5b1086f09 service nova] Releasing lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1562.832375] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5255c37b-3d00-8c27-46fb-da4bbe6da78c, 'name': SearchDatastore_Task, 'duration_secs': 0.008029} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.833136] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a4d757b-5a45-40c4-a885-bb715b6849cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.837658] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1562.837658] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5286264d-6747-96c3-46bc-4a3a37d3b974" [ 1562.837658] env[68437]: _type = "Task" [ 1562.837658] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.844580] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5286264d-6747-96c3-46bc-4a3a37d3b974, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.347756] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5286264d-6747-96c3-46bc-4a3a37d3b974, 'name': SearchDatastore_Task, 'duration_secs': 0.009838} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.348042] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1563.348313] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a5ce6701-f5d2-4eb7-9d6c-3ace121de308/a5ce6701-f5d2-4eb7-9d6c-3ace121de308.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.348570] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcdd82a1-4fba-443d-8947-342b337f4e23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.355237] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1563.355237] env[68437]: value = "task-2945322" [ 1563.355237] env[68437]: _type = "Task" [ 1563.355237] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.362508] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.865219] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.409258} completed successfully. 
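Annotation: the copy above goes from the shared image cache, [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk, to the per-instance folder [datastore1] <instance-uuid>/<instance-uuid>.vmdk. A tiny illustrative helper (not Nova's ds_util) that rebuilds those two datastore paths from the ids in this log:

    def ds_path(datastore, *parts):
        # vSphere datastore paths have the form "[datastore1] dir/file.vmdk"
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = 'a272f526-6b8d-4a29-bd06-cd29ab5fabbe'
    instance_uuid = 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308'

    src = ds_path('datastore1', 'devstack-image-cache_base', image_id, image_id + '.vmdk')
    dst = ds_path('datastore1', instance_uuid, instance_uuid + '.vmdk')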
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.865549] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] a5ce6701-f5d2-4eb7-9d6c-3ace121de308/a5ce6701-f5d2-4eb7-9d6c-3ace121de308.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1563.865689] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1563.865932] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2e92a01-edba-446b-8ffc-4d26275b2932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.872385] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1563.872385] env[68437]: value = "task-2945323" [ 1563.872385] env[68437]: _type = "Task" [ 1563.872385] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.880891] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.382156] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100812} completed successfully. 
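Annotation: the "Extending root virtual disk to 1048576" step above is the m1.nano flavor's root_gb=1 expressed in KiB, the unit the ExtendVirtualDisk call works in:

    root_gb = 1                        # flavor m1.nano from the log above
    root_kib = root_gb * 1024 * 1024   # 1 GiB = 1,048,576 KiB
    assert root_kib == 1048576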
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.382425] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.383202] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41f32bd-4e77-4be2-9cb1-bfda81bd43d9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.404561] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] a5ce6701-f5d2-4eb7-9d6c-3ace121de308/a5ce6701-f5d2-4eb7-9d6c-3ace121de308.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.404769] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2c9899e-86b1-4272-9d45-663048e16c2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.424120] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1564.424120] env[68437]: value = "task-2945324" [ 1564.424120] env[68437]: _type = "Task" [ 1564.424120] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.431360] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945324, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.933694] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945324, 'name': ReconfigVM_Task, 'duration_secs': 0.322871} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.934475] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfigured VM instance instance-0000007c to attach disk [datastore1] a5ce6701-f5d2-4eb7-9d6c-3ace121de308/a5ce6701-f5d2-4eb7-9d6c-3ace121de308.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1564.935266] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-982ed3bf-b352-4e7b-af7b-5e6122404f24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.942048] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1564.942048] env[68437]: value = "task-2945325" [ 1564.942048] env[68437]: _type = "Task" [ 1564.942048] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.949061] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945325, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.451345] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945325, 'name': Rename_Task, 'duration_secs': 0.135106} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.451635] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1565.451883] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-950cf4a6-ead1-419e-8f86-38fcdafcf0a1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.458995] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1565.458995] env[68437]: value = "task-2945326" [ 1565.458995] env[68437]: _type = "Task" [ 1565.458995] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.466490] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945326, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.971774] env[68437]: DEBUG oslo_vmware.api [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945326, 'name': PowerOnVM_Task, 'duration_secs': 0.449847} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.972215] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.972461] env[68437]: INFO nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1565.972731] env[68437]: DEBUG nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.973987] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180e153d-17e7-4d59-90db-24ba7810d355 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.492775] env[68437]: INFO nova.compute.manager [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Took 11.29 seconds to build instance. [ 1566.995394] env[68437]: DEBUG oslo_concurrency.lockutils [None req-204a2ae5-1dec-42ec-b010-a376df26d3c5 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.801s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1567.085241] env[68437]: DEBUG nova.compute.manager [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Received event network-changed-d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1567.085445] env[68437]: DEBUG nova.compute.manager [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Refreshing instance network info cache due to event network-changed-d9010969-e902-406d-85bd-7b15f516910f. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1567.085658] env[68437]: DEBUG oslo_concurrency.lockutils [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] Acquiring lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.085803] env[68437]: DEBUG oslo_concurrency.lockutils [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] Acquired lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1567.085966] env[68437]: DEBUG nova.network.neutron [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Refreshing network info cache for port d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1567.793638] env[68437]: DEBUG nova.network.neutron [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updated VIF entry in instance network info cache for port d9010969-e902-406d-85bd-7b15f516910f. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1567.793991] env[68437]: DEBUG nova.network.neutron [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating instance_info_cache with network_info: [{"id": "d9010969-e902-406d-85bd-7b15f516910f", "address": "fa:16:3e:a2:50:cf", "network": {"id": "2c0d518c-94e8-412e-bdd1-35e57e1199e1", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-320693712-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "490b965164a14c9faf6b0329886d617e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9c1b8689-a9b4-4972-beb9-6a1c8de1dc88", "external-id": "nsx-vlan-transportzone-455", "segmentation_id": 455, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9010969-e9", "ovs_interfaceid": "d9010969-e902-406d-85bd-7b15f516910f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.296539] env[68437]: DEBUG oslo_concurrency.lockutils [req-d32b38ca-c05c-493d-9432-da9e6e7503dd req-ba8a3f45-5b51-49dd-929a-6b48c0891c9c service nova] Releasing lock "refresh_cache-a5ce6701-f5d2-4eb7-9d6c-3ace121de308" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1570.560527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1570.560804] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1571.064800] env[68437]: DEBUG nova.compute.utils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1571.568023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1572.625103] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1572.625500] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1572.625698] env[68437]: INFO nova.compute.manager [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Attaching volume 4db1d8c0-6584-433c-a0b6-6f117e112fa3 to /dev/sdb [ 1572.655680] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b6a7f2-feb3-4f53-a54d-f1c61e48fbbd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.663222] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313143b4-c928-47cb-bb88-4d983a955c41 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.676553] env[68437]: DEBUG nova.virt.block_device [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] 
[instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating existing volume attachment record: 281e413c-2cab-44d6-ae32-862ac5fef5f2 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1573.921343] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.921749] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.921749] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1574.231741] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1575.225889] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1575.226192] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1576.230553] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1576.230951] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.219659] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1577.219908] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591183', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'name': 'volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0f078ed3-d253-4bc4-901c-3c84027392b4', 'attached_at': '', 'detached_at': '', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'serial': '4db1d8c0-6584-433c-a0b6-6f117e112fa3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1577.220892] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156b6f39-9a20-477c-9c18-8ff65a670b16 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.236734] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.237613] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b02891-d7d2-4e43-b316-e3cf6443a191 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.260910] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3/volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1577.261140] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46575dbb-006d-4e05-a140-fa57bee5b393 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.278917] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1577.278917] env[68437]: value = "task-2945331" [ 1577.278917] env[68437]: _type = "Task" [ 1577.278917] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.285858] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945331, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.788319] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945331, 'name': ReconfigVM_Task, 'duration_secs': 0.324062} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.788590] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3/volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.793237] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9ae367d-0d27-4113-81f1-e2d08a8f2df4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.807242] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1577.807242] env[68437]: value = "task-2945332" [ 1577.807242] env[68437]: _type = "Task" [ 1577.807242] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.815687] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945332, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.230767] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1578.316232] env[68437]: DEBUG oslo_vmware.api [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945332, 'name': ReconfigVM_Task, 'duration_secs': 0.140664} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.316541] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591183', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'name': 'volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0f078ed3-d253-4bc4-901c-3c84027392b4', 'attached_at': '', 'detached_at': '', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'serial': '4db1d8c0-6584-433c-a0b6-6f117e112fa3'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1578.733717] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1578.733998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1578.734188] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1578.734347] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1578.735245] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fdd256-8159-4f7a-ab61-563fecb0561c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.744327] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010f1745-8182-44ab-83b0-7efd75013aa0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.757390] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f320fc2-c7ac-40d0-aad7-5ae7dd8db792 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.763248] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbf48f0-4001-4b61-809c-0eee28e9109d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.790746] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181022MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1578.790891] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1578.791102] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1579.350030] env[68437]: DEBUG nova.objects.instance [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'flavor' on Instance uuid 0f078ed3-d253-4bc4-901c-3c84027392b4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1579.810726] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance a5ce6701-f5d2-4eb7-9d6c-3ace121de308 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1579.854581] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a20a1796-7fff-42c0-84b2-1283dd8c545a tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.229s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1580.313884] env[68437]: INFO nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 0394dc74-f340-4b01-9de8-17023546394b has allocations against this compute host but is not found in the database. 
[ 1580.314096] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1580.314248] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1580.358271] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab65538a-44fb-4c39-b99c-a082e94e20ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.365008] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb66982-27c7-46ef-9c46-e3e99fb9be6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.394624] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d82c1ae-a4b3-444c-8eaf-ec71287604ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.401230] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9194ed1-cc13-4f1a-a533-73d667779bd5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.413731] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.760363] env[68437]: DEBUG nova.compute.manager [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Stashing vm_state: active {{(pid=68437) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1580.917275] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1581.276663] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1581.422591] env[68437]: DEBUG nova.compute.resource_tracker [None 
req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1581.422998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.632s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1581.423126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.147s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1581.928734] env[68437]: INFO nova.compute.claims [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1582.435083] env[68437]: INFO nova.compute.resource_tracker [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating resource usage from migration 0394dc74-f340-4b01-9de8-17023546394b [ 1582.480811] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5430a14e-e76c-4496-9ff4-cc40f22f4943 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.487782] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30cd09c-239a-49f2-8203-ef7a098c8d24 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.517875] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0cf7c2-8716-434b-afb3-e55eed5e819c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.524398] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469cd175-e615-4f3a-a2cc-0a5367ed5a5d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.537060] env[68437]: DEBUG nova.compute.provider_tree [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1583.039886] env[68437]: DEBUG nova.scheduler.client.report [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1583.544912] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.122s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1583.545305] env[68437]: INFO nova.compute.manager [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Migrating [ 1584.058831] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.059041] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1584.059228] env[68437]: DEBUG nova.network.neutron [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1584.768789] env[68437]: DEBUG nova.network.neutron [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": 
"fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.271644] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1586.786312] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c00415f-9aca-49a3-97f5-f6abbd393d68 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.808235] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 0 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1587.314239] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1587.314570] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe727962-3f09-4e53-acfc-bc2446567e36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.322257] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1587.322257] env[68437]: value = "task-2945333" [ 1587.322257] env[68437]: _type = "Task" [ 1587.322257] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.330140] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.832365] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945333, 'name': PowerOffVM_Task, 'duration_secs': 0.232632} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.832825] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1587.832825] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 17 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1588.340031] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:04Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1588.340179] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1588.340313] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1588.340526] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1588.340684] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1588.340835] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1588.341048] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 
tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1588.341214] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1588.341395] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1588.341614] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1588.341795] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1588.346880] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1af8e27-b082-45e1-92b9-04ebd172c27c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.362514] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1588.362514] env[68437]: value = "task-2945334" [ 1588.362514] env[68437]: _type = "Task" [ 1588.362514] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.369954] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.873060] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945334, 'name': ReconfigVM_Task, 'duration_secs': 0.178255} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.873467] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 33 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1589.379739] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1589.379996] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1589.380187] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1589.380375] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1589.380565] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1589.380724] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1589.380932] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1589.381124] env[68437]: DEBUG nova.virt.hardware [None 
req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1589.381304] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1589.381470] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1589.381676] env[68437]: DEBUG nova.virt.hardware [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1589.386893] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1589.387198] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19c8f027-0cdb-45ff-a36d-cd6d216b1ecb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.408049] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1589.408049] env[68437]: value = "task-2945335" [ 1589.408049] env[68437]: _type = "Task" [ 1589.408049] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.415450] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945335, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.919023] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945335, 'name': ReconfigVM_Task, 'duration_secs': 0.181116} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.919023] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1589.919517] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aea60b-7498-4b39-a62d-4b45a76fce2f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.943986] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.944564] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72bdea5f-7636-49b0-bb47-f38f36531c6e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.962458] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1589.962458] env[68437]: value = "task-2945336" [ 1589.962458] env[68437]: _type = "Task" [ 1589.962458] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.970046] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945336, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.472833] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945336, 'name': ReconfigVM_Task, 'duration_secs': 0.268526} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.473143] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.473429] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 50 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1590.980624] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3321ea-16ba-43f2-94d0-2b7327e096a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.001552] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99509de4-b245-4dc3-80c3-de10cc1c661f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.020592] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 67 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1592.651915] env[68437]: DEBUG nova.network.neutron [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1593.672452] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1593.672452] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1593.672878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 
tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1594.706424] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.706707] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1594.706809] env[68437]: DEBUG nova.network.neutron [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1595.403914] env[68437]: DEBUG nova.network.neutron [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.906956] env[68437]: DEBUG oslo_concurrency.lockutils [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1596.416523] env[68437]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c482dd27-d84a-40e2-a066-f08ef58af9dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.423730] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf159a69-93c8-4abe-951d-3885755b54b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.516597] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1f4532-4c06-431b-a384-eee4361b6b48 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.539552] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd80138-4f66-466c-a4a9-5ae58cb53d99 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.546245] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 83 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1598.053056] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1598.053056] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a591c843-98cb-4c5a-8180-520a36cbe20a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.060513] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1598.060513] env[68437]: value = "task-2945337" [ 1598.060513] env[68437]: _type = "Task" [ 1598.060513] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.068416] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.571078] env[68437]: DEBUG oslo_vmware.api [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945337, 'name': PowerOnVM_Task, 'duration_secs': 0.376548} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.571459] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1598.571504] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-2155c4fd-1134-4c70-9642-f798b6d080ba tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance '0f078ed3-d253-4bc4-901c-3c84027392b4' progress to 100 {{(pid=68437) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1601.202625] env[68437]: DEBUG nova.network.neutron [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Port fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 binding to destination host cpu-1 is already ACTIVE {{(pid=68437) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3229}} [ 1601.202960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.203062] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1601.203231] env[68437]: DEBUG nova.network.neutron [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1601.895467] env[68437]: DEBUG nova.network.neutron [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.398308] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1602.901997] env[68437]: DEBUG nova.compute.manager [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68437) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1603.999397] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1603.999711] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1604.502702] env[68437]: DEBUG nova.objects.instance [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'migration_context' on Instance uuid 0f078ed3-d253-4bc4-901c-3c84027392b4 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1605.022461] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Refreshing inventories for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1605.036182] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Updating ProviderTree inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1605.036426] env[68437]: DEBUG nova.compute.provider_tree [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Updating inventory in ProviderTree for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1605.046910] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Refreshing aggregate associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, aggregates: None {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1605.064559] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Refreshing trait associations for resource provider 422e986f-b38b-46ad-94b3-91f3ccd10a05, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=68437) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1605.110791] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1304020-c0b6-4d48-b2a3-f6140a2ddb50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.118741] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e906bc-93f4-4c00-922d-b7c84e495303 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.148431] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed88226-f3cc-4741-b12c-6c72c320ee95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.155171] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29568009-848a-4f1f-b660-4e03785bb8ba {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.168682] env[68437]: DEBUG nova.compute.provider_tree [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.671957] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 
tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1606.089068] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1606.089409] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1606.593410] env[68437]: DEBUG nova.compute.utils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1606.683551] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.684s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1607.096246] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.007s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1608.158388] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1608.158842] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: waited 0.001s {{(pid=68437) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1608.158966] env[68437]: INFO nova.compute.manager [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attaching volume 1b0d0953-a048-4d0d-87b0-d97e7ed651b9 to /dev/sdb [ 1608.194941] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef38848a-b462-4a87-a2ec-0f5d7a71cd28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.202681] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c0b912-1c7d-46e8-b432-0f300fc9e47b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.218959] env[68437]: DEBUG nova.virt.block_device [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating existing volume attachment record: 2fdf99bb-9179-4071-9452-10e095b0bc44 {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1608.223361] env[68437]: INFO nova.compute.manager [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Swapping old allocation on dict_keys(['422e986f-b38b-46ad-94b3-91f3ccd10a05']) held by migration 0394dc74-f340-4b01-9de8-17023546394b for instance [ 1608.243734] env[68437]: DEBUG nova.scheduler.client.report [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Overwriting current allocation {'allocations': {'422e986f-b38b-46ad-94b3-91f3ccd10a05': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 172}}, 'project_id': 'aa08e0c3081143cbb8f4d00d7e5cf222', 'user_id': 'd5f65dac5fd04c59b33cb454ee1c3e2b', 'consumer_generation': 1} on consumer 0f078ed3-d253-4bc4-901c-3c84027392b4 {{(pid=68437) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1608.331166] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.331374] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1608.331642] env[68437]: DEBUG nova.network.neutron [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1609.035943] env[68437]: 
DEBUG nova.network.neutron [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [{"id": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "address": "fa:16:3e:c2:4a:2b", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcbc43b1-2f", "ovs_interfaceid": "fcbc43b1-2f65-489a-a9b4-5b1fa5f71458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.539094] env[68437]: DEBUG oslo_concurrency.lockutils [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-0f078ed3-d253-4bc4-901c-3c84027392b4" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1609.540116] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2b5aba-39e7-4a35-89ac-030cd2a19fab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.547323] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e081f02-95a2-47c2-bf99-6bbefa341975 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.640605] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1610.640950] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6e75e1e-2555-4d88-b588-cfcca37a83a8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.648247] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1610.648247] env[68437]: value = "task-2945340" [ 1610.648247] env[68437]: _type = "Task" [ 1610.648247] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.656158] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.157345] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.659421] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.159954] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945340, 'name': PowerOffVM_Task, 'duration_secs': 1.206268} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.160728] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1612.161761] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1612.161761] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.161761] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1612.161761] env[68437]: DEBUG nova.virt.hardware 
[None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.162045] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1612.162045] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1612.162261] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1612.162430] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1612.162597] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1612.162756] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1612.162929] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1612.167761] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9311bf8f-ecbc-4cbb-a160-1e893ef8e2c6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.182668] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1612.182668] env[68437]: value = "task-2945341" [ 1612.182668] env[68437]: _type = "Task" [ 1612.182668] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.190252] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945341, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.693586] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945341, 'name': ReconfigVM_Task, 'duration_secs': 0.195547} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.694405] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5dd72e-df9f-4c5f-a559-844dfdba8945 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.716255] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1612.716490] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1612.716666] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1612.716857] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1612.717018] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1612.717176] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1612.717384] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1612.717542] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1612.717709] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1612.717868] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1612.718061] env[68437]: DEBUG nova.virt.hardware [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1612.718789] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d07054b-e69d-4f56-996f-13fa46532454 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.723522] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1612.723522] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d41c-0cc3-db5a-5351-471cf58e6b7f" [ 1612.723522] env[68437]: _type = "Task" [ 1612.723522] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.730824] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d41c-0cc3-db5a-5351-471cf58e6b7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.761300] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1612.761545] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591184', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'name': 'volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'serial': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1612.762315] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0602b2-b91c-4d80-bf8d-9207fec227b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.777191] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f032b30-d6e7-42eb-82fa-fef36195dc20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.799738] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9/volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.799942] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bb040482-61b6-4ec8-abee-921f9287bf32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.816309] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1612.816309] env[68437]: value = "task-2945342" [ 1612.816309] env[68437]: _type = "Task" [ 1612.816309] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.823442] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945342, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.234086] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d6d41c-0cc3-db5a-5351-471cf58e6b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.010249} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.239419] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1613.239711] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c7e65bc-ddc2-4bf8-bfb7-15583af78000 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.257900] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1613.257900] env[68437]: value = "task-2945343" [ 1613.257900] env[68437]: _type = "Task" [ 1613.257900] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.265907] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945343, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.325861] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945342, 'name': ReconfigVM_Task, 'duration_secs': 0.34184} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.326134] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9/volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1613.330823] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5463cddb-0bab-4845-8bce-49c5ad00c856 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.346344] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1613.346344] env[68437]: value = "task-2945344" [ 1613.346344] env[68437]: _type = "Task" [ 1613.346344] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.354625] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945344, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.768392] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945343, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.859762] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945344, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.270062] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945343, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.358374] env[68437]: DEBUG oslo_vmware.api [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945344, 'name': ReconfigVM_Task, 'duration_secs': 0.911223} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.358730] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591184', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'name': 'volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'serial': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1614.770142] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945343, 'name': ReconfigVM_Task, 'duration_secs': 1.044806} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.770554] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1614.771221] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea9b5fe-8c7a-4241-bb06-a6235166cb33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.794721] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1614.794958] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2860ea2d-ee16-4819-96b6-8406116ba71a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.811870] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1614.811870] env[68437]: value = "task-2945345" [ 1614.811870] env[68437]: _type = "Task" [ 1614.811870] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.819133] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.321757] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945345, 'name': ReconfigVM_Task, 'duration_secs': 0.265167} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.322064] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4/0f078ed3-d253-4bc4-901c-3c84027392b4.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1615.322908] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7551b88-cea9-412a-8a0b-e2593b1bf840 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.343341] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2038a7-d944-4c33-bcaf-c6134582038d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.363325] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67443fde-aa91-4cd1-82b9-eb69e21fb32c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.383748] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d70a2d-8579-4f0b-8ccd-9491bb18b5c2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.389960] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1615.390211] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6017372-484d-428d-84b1-a8a40d66c247 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.395638] env[68437]: DEBUG nova.objects.instance [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1615.397465] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1615.397465] env[68437]: value = "task-2945346" [ 1615.397465] env[68437]: _type = "Task" [ 1615.397465] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.405115] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945346, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.904300] env[68437]: DEBUG oslo_concurrency.lockutils [None req-831e1283-87a9-4faf-93ce-c910b77def31 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.746s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1615.909698] env[68437]: DEBUG oslo_vmware.api [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945346, 'name': PowerOnVM_Task, 'duration_secs': 0.35121} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.910265] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1616.609527] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1616.609767] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1616.951091] env[68437]: INFO nova.compute.manager [None req-380ec950-0416-4ea1-be17-adc1ebc226d2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance to original state: 'active' [ 1617.112968] env[68437]: DEBUG nova.compute.utils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1617.615710] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1618.208738] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1618.209044] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1618.209225] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1618.209410] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1618.209576] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1618.211566] env[68437]: INFO nova.compute.manager [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Terminating instance [ 1618.678925] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1618.679248] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1618.679499] env[68437]: INFO nova.compute.manager [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attaching volume 385465c8-d52f-4f5c-902a-a6b4468fd163 to /dev/sdc [ 1618.710155] 
env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97a6a8c-b62f-4b1e-99c4-c3f6adcac880 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.715490] env[68437]: DEBUG nova.compute.manager [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1618.715867] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1618.716229] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34b79a90-6463-4cb4-bdb7-e8dd3330a62d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.725425] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa5102b-cf85-424b-9cb0-342be88bea1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.727813] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1618.727813] env[68437]: value = "task-2945347" [ 1618.727813] env[68437]: _type = "Task" [ 1618.727813] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.739295] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.743062] env[68437]: DEBUG nova.virt.block_device [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating existing volume attachment record: 23d1071e-5464-4551-ada2-c0a5ed119abf {{(pid=68437) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1619.265205] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945347, 'name': PowerOffVM_Task, 'duration_secs': 0.197144} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.265205] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1619.265205] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1619.265205] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591183', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'name': 'volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '0f078ed3-d253-4bc4-901c-3c84027392b4', 'attached_at': '2025-03-11T18:53:41.000000', 'detached_at': '', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'serial': '4db1d8c0-6584-433c-a0b6-6f117e112fa3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1619.265205] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9951cf0c-1e95-44d7-942b-2a29b8a469d6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.265205] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8576b90a-e6bd-4c5a-be82-849ce5a68c55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.272649] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a825ec-a77a-47b1-bd6a-69b9cc8bffd3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.292821] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513b6cd0-d950-446b-b65c-b52b8b604402 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.307277] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] The volume has not been displaced from its original location: [datastore1] volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3/volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1619.312687] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1619.312987] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59ca7f37-2953-45bf-a7d8-bf3afd0bac32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.331931] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1619.331931] env[68437]: value = "task-2945349" [ 1619.331931] env[68437]: _type = "Task" [ 1619.331931] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.340745] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945349, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.841246] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945349, 'name': ReconfigVM_Task, 'duration_secs': 0.196574} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.841499] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1619.846052] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84bd2cb1-6090-4eb6-a484-358754502c13 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.862671] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1619.862671] env[68437]: value = "task-2945350" [ 1619.862671] env[68437]: _type = "Task" [ 1619.862671] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.872494] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945350, 'name': ReconfigVM_Task} progress is 5%. 
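"Reconfiguring VM instance instance-0000007b to detach disk 2001" is a ReconfigVM_Task whose spec removes a single virtual-disk device (device key 2001) without touching the backing file, which is why the VMDK can be handed straight back to Cinder afterwards. Nova builds that spec through its suds-based oslo.vmware client; the sketch below expresses the same spec with pyVmomi purely as an illustration, and obtaining a live `vm` object is assumed rather than shown.

from pyVmomi import vim

def detach_disk_spec(vm, device_key):
    # Find the virtual disk with the given device key on the VM.
    disk = next(dev for dev in vm.config.hardware.device
                if isinstance(dev, vim.vm.device.VirtualDisk)
                and dev.key == device_key)
    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
    change.device = disk
    # No fileOperation is set, so the .vmdk stays on the datastore; the
    # device is merely removed from the VM (a detach, not a delete).
    return vim.vm.ConfigSpec(deviceChange=[change])

# task = vm.ReconfigVM_Task(spec=detach_disk_spec(vm, 2001))
# ...which would then be polled just like task-2945349 above.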
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.372741] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945350, 'name': ReconfigVM_Task, 'duration_secs': 0.149228} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.372741] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591183', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'name': 'volume-4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '0f078ed3-d253-4bc4-901c-3c84027392b4', 'attached_at': '2025-03-11T18:53:41.000000', 'detached_at': '', 'volume_id': '4db1d8c0-6584-433c-a0b6-6f117e112fa3', 'serial': '4db1d8c0-6584-433c-a0b6-6f117e112fa3'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1620.373259] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1620.373544] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1c1fc-362d-4144-851f-e236d1c69195 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.379738] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1620.379953] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-792d3b97-df17-428d-a7ce-c9bb243148a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.054548] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1621.054793] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1621.054963] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleting the datastore file [datastore1] 0f078ed3-d253-4bc4-901c-3c84027392b4 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.055243] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a091233a-dcb1-4dcd-8f8c-f7d1254ebee8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.061527] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1621.061527] env[68437]: value = "task-2945353" [ 1621.061527] env[68437]: _type = "Task" [ 1621.061527] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.069201] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.570837] env[68437]: DEBUG oslo_vmware.api [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157928} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.571264] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.571264] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1621.571402] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1621.571579] env[68437]: INFO nova.compute.manager [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Took 2.86 seconds to destroy the instance on the hypervisor. [ 1621.571829] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
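Read together, the records from the PowerOffVM_Task onwards trace the whole hypervisor-side teardown of instance 0f078ed3: power off, detach the volume's disk, unregister the VM, delete its datastore directory, and only then hand off to network deallocation, which runs inside a retrying looping call because the Neutron side may lag behind. A compressed outline of that ordering follows; every helper name in it is a hypothetical stand-in, not the real driver code, which lives in nova.virt.vmwareapi (vm_util, volumeops, vmops, ds_util).

def teardown_on_hypervisor(session, instance, bdms):
    power_off_vm(session, instance)                  # PowerOffVM_Task
    for bdm in bdms:
        detach_volume_vmdk(session, instance, bdm)   # ReconfigVM_Task, remove disk
    unregister_vm(session, instance)                 # UnregisterVM
    delete_datastore_dir(session, instance)          # DeleteDatastoreFile_Task
    # Network cleanup comes last and is retried, since ports may already be
    # gone or Neutron may be momentarily unreachable.
    deallocate_network_with_retries(instance)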
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1621.572024] env[68437]: DEBUG nova.compute.manager [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1621.572119] env[68437]: DEBUG nova.network.neutron [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1622.045099] env[68437]: DEBUG nova.compute.manager [req-98a804db-cd71-47d2-9401-88e5b1b78d2d req-20984087-3d01-46d5-864f-a1e6ea26040d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Received event network-vif-deleted-fcbc43b1-2f65-489a-a9b4-5b1fa5f71458 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1622.045294] env[68437]: INFO nova.compute.manager [req-98a804db-cd71-47d2-9401-88e5b1b78d2d req-20984087-3d01-46d5-864f-a1e6ea26040d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Neutron deleted interface fcbc43b1-2f65-489a-a9b4-5b1fa5f71458; detaching it from the instance and deleting it from the info cache [ 1622.045471] env[68437]: DEBUG nova.network.neutron [req-98a804db-cd71-47d2-9401-88e5b1b78d2d req-20984087-3d01-46d5-864f-a1e6ea26040d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.486472] env[68437]: DEBUG nova.network.neutron [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.547797] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-273f1394-b5c6-46b5-802d-6acbec05fe59 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.557741] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efab82f-a3a8-47ee-ba09-a4c23f43696f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.583426] env[68437]: DEBUG nova.compute.manager [req-98a804db-cd71-47d2-9401-88e5b1b78d2d req-20984087-3d01-46d5-864f-a1e6ea26040d service nova] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Detach interface failed, port_id=fcbc43b1-2f65-489a-a9b4-5b1fa5f71458, reason: Instance 0f078ed3-d253-4bc4-901c-3c84027392b4 could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1622.989109] env[68437]: INFO nova.compute.manager [-] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Took 1.42 seconds to deallocate network for instance. [ 1623.286106] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Volume attach. 
Driver type: vmdk {{(pid=68437) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1623.286377] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591185', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'name': 'volume-385465c8-d52f-4f5c-902a-a6b4468fd163', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'serial': '385465c8-d52f-4f5c-902a-a6b4468fd163'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1623.287352] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6988dc-1062-455c-93e4-05a380b5b190 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.304550] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a3065e-0a99-4747-916c-a31131374d56 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.331488] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] volume-385465c8-d52f-4f5c-902a-a6b4468fd163/volume-385465c8-d52f-4f5c-902a-a6b4468fd163.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1623.331759] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5014570-aed8-41a1-982a-b617ba21828f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.349936] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1623.349936] env[68437]: value = "task-2945354" [ 1623.349936] env[68437]: _type = "Task" [ 1623.349936] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.357414] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945354, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.530062] env[68437]: INFO nova.compute.manager [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Took 0.54 seconds to detach 1 volumes for instance. 
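Interleaved with the attach for instance a5ce6701, Nova keeps emitting INFO-level summaries such as "Took 2.86 seconds to destroy the instance on the hypervisor", "Took 1.42 seconds to deallocate network" and "Took 0.54 seconds to detach 1 volumes". Those lines are the quickest way to see where the wall-clock time went in a run like this; a grep-style sketch, with the log path again an assumption:

import re
from collections import defaultdict

TOOK_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
                     r"Took (?P<secs>[\d.]+) seconds to (?P<action>[^.{]+)")

def timing_summary(path="nova-compute.log"):
    per_instance = defaultdict(list)
    with open(path) as fh:
        for line in fh:
            # finditer: this dump packs several records onto one physical line.
            for m in TOOK_RE.finditer(line):
                per_instance[m.group("uuid")].append(
                    (float(m.group("secs")), m.group("action").strip()))
    return per_instance

if __name__ == "__main__":
    for uuid, entries in timing_summary().items():
        for secs, action in sorted(entries, reverse=True):
            print(f"{uuid[:8]}  {secs:6.2f}s  {action}")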
[ 1623.859738] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945354, 'name': ReconfigVM_Task, 'duration_secs': 0.343723} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.860167] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfigured VM instance instance-0000007c to attach disk [datastore1] volume-385465c8-d52f-4f5c-902a-a6b4468fd163/volume-385465c8-d52f-4f5c-902a-a6b4468fd163.vmdk or device None with type thin {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.864635] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ad57966-c753-4116-a712-835bf19815f6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.879668] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1623.879668] env[68437]: value = "task-2945355" [ 1623.879668] env[68437]: _type = "Task" [ 1623.879668] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.887334] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945355, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.036765] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1624.037095] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1624.037300] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1624.059537] env[68437]: INFO nova.scheduler.client.report [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted allocations for instance 0f078ed3-d253-4bc4-901c-3c84027392b4 [ 1624.389261] env[68437]: DEBUG oslo_vmware.api [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945355, 'name': ReconfigVM_Task, 'duration_secs': 0.139598} completed successfully. 
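Note how cheaply the resource tracker got its lock here: update_usage waited 0.001s for "compute_resources" and held it for under a millisecond. On a busy compute node those two numbers are the first thing to check when claims appear slow, and they can be pulled straight out of the lockutils records (stdlib-only sketch, log path assumed):

import re
from collections import defaultdict

LOCK_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" '
    r'(?:acquired by "[^"]+" :: waited (?P<waited>[\d.]+)s'
    r'|"released" by "[^"]+" :: held (?P<held>[\d.]+)s)')

def lock_stats(path="nova-compute.log"):
    stats = defaultdict(lambda: {"waited": [], "held": []})
    with open(path) as fh:
        for line in fh:
            for m in LOCK_RE.finditer(line):
                if m.group("waited") is not None:
                    stats[m.group("name")]["waited"].append(float(m.group("waited")))
                else:
                    stats[m.group("name")]["held"].append(float(m.group("held")))
    return stats

if __name__ == "__main__":
    for name, s in sorted(lock_stats().items()):
        print(f'{name}: max waited {max(s["waited"], default=0):.3f}s, '
              f'max held {max(s["held"], default=0):.3f}s')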
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.390080] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591185', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'name': 'volume-385465c8-d52f-4f5c-902a-a6b4468fd163', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'serial': '385465c8-d52f-4f5c-902a-a6b4468fd163'} {{(pid=68437) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1624.567086] env[68437]: DEBUG oslo_concurrency.lockutils [None req-a870359d-f1ed-49e9-9294-a3f3c4f4de13 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "0f078ed3-d253-4bc4-901c-3c84027392b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.358s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1625.428579] env[68437]: DEBUG nova.objects.instance [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1625.460111] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1625.460346] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1625.933870] env[68437]: DEBUG oslo_concurrency.lockutils [None req-3f5c29a7-0ebb-4a8e-920d-efd5ba62bef0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1625.962654] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Starting instance... 
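The per-instance serialization visible here comes from oslo.concurrency's lockutils: terminate held the lock named after instance 0f078ed3 for 6.358s, attach held a5ce6701's for 7.254s, and the build of 47ba3b7e takes its own lock next. Outside of Nova's own wrappers, the same primitives look like this minimal sketch (the lock name is just an example UUID):

from oslo_concurrency import lockutils

@lockutils.synchronized('a5ce6701-f5d2-4eb7-9d6c-3ace121de308')
def do_attach_volume():
    # Runs with the per-instance lock held; concurrent attach/detach/terminate
    # calls for the same instance serialize on it, producing the waited/held
    # timings seen in the records above.
    pass

def do_detach_volume():
    # Equivalent context-manager form.
    with lockutils.lock('a5ce6701-f5d2-4eb7-9d6c-3ace121de308'):
        pass

do_attach_volume()
do_detach_volume()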
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1626.221153] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1626.221391] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1626.484594] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1626.484912] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1626.486464] env[68437]: INFO nova.compute.claims [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1626.724621] env[68437]: INFO nova.compute.manager [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Detaching volume 1b0d0953-a048-4d0d-87b0-d97e7ed651b9 [ 1626.755102] env[68437]: INFO nova.virt.block_device [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attempting to driver detach volume 1b0d0953-a048-4d0d-87b0-d97e7ed651b9 from mountpoint /dev/sdb [ 1626.755347] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Volume detach. 
Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1626.755536] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591184', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'name': 'volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'serial': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1626.756431] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5996bf9f-0168-493d-b2ed-9f95d41fc9f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.781280] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e492819a-3a26-4afe-9187-f8794678fed1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.788059] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c8fe23-3390-411e-a834-c5867d1c80e7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.809705] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a420739b-47e4-4d35-b586-1164b89a8f78 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.824944] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] The volume has not been displaced from its original location: [datastore1] volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9/volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9.vmdk. No consolidation needed. 
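"The volume has not been displaced from its original location ... No consolidation needed" refers to a safety check made before the detach: storage-motion operations can leave a volume's backing file somewhere other than its canonical volume-<uuid>/volume-<uuid>.vmdk path, and in that case the driver has to consolidate (move the data back) before returning the volume to Cinder. The check itself is a datastore-path comparison; a sketch of it expressed with pyVmomi for illustration, where `vm` is assumed to be a live VirtualMachine object and Nova performs the equivalent property read through oslo.vmware:

from pyVmomi import vim

def disk_backing_path(vm, device_key):
    # Datastore path currently backing the virtual disk with this device key,
    # e.g. '[datastore1] volume-<uuid>/volume-<uuid>.vmdk'.
    for dev in vm.config.hardware.device:
        if isinstance(dev, vim.vm.device.VirtualDisk) and dev.key == device_key:
            return dev.backing.fileName
    return None

def needs_consolidation(vm, device_key, original_path):
    return disk_backing_path(vm, device_key) != original_path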
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1626.830098] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1626.830373] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac61bece-72c5-4ac3-bdbe-d7e752e220b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.847487] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1626.847487] env[68437]: value = "task-2945356" [ 1626.847487] env[68437]: _type = "Task" [ 1626.847487] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.854745] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.357587] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945356, 'name': ReconfigVM_Task, 'duration_secs': 0.242288} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.359054] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1627.362731] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2c609f7-bcd9-4709-83ab-a23d8cc831d1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.376673] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1627.376673] env[68437]: value = "task-2945357" [ 1627.376673] env[68437]: _type = "Task" [ 1627.376673] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.384261] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945357, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.531811] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12774ad-c264-41c4-b942-ce6189524be3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.539036] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24493553-1f2e-4060-814a-626a58da4162 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.567809] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-067a2bc6-674d-44c6-bee8-a9f5abbb8ca4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.574125] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7051ef4-640a-434d-8f2c-5c1269af597a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.586219] env[68437]: DEBUG nova.compute.provider_tree [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.886079] env[68437]: DEBUG oslo_vmware.api [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945357, 'name': ReconfigVM_Task, 'duration_secs': 0.130942} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.886395] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591184', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'name': 'volume-1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9', 'serial': '1b0d0953-a048-4d0d-87b0-d97e7ed651b9'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1628.089808] env[68437]: DEBUG nova.scheduler.client.report [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1628.425053] env[68437]: DEBUG nova.objects.instance [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1628.594216] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1628.594755] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1629.099850] env[68437]: DEBUG nova.compute.utils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1629.101273] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Allocating IP information in the background. 
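The inventory the report client just confirmed for provider 422e986f determines how many more claims like the one above can fit: Placement treats (total - reserved) * allocation_ratio as the allocatable amount of each resource class, while max_unit caps any single allocation (here 16 VCPUs, 65530 MB, 156 GB). With the numbers from the record above:

# Allocatable capacity per resource class, from the inventory reported above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    allocatable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc:<10} {allocatable:g}")
# VCPU       192
# MEMORY_MB  196078
# DISK_GB    400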
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1629.101445] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1629.151582] env[68437]: DEBUG nova.policy [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5f65dac5fd04c59b33cb454ee1c3e2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aa08e0c3081143cbb8f4d00d7e5cf222', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1629.424518] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Successfully created port: 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1629.433098] env[68437]: DEBUG oslo_concurrency.lockutils [None req-34a25651-f119-45b1-80ab-8e1f005b4022 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.212s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1629.450835] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1629.451071] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1629.604408] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Start building block device mappings for instance. 
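The failed policy check above is Nova probing whether this user may attach external networks; for a plain member it fails, which simply excludes external networks from consideration rather than failing the build. "Successfully created port: 4ccaa9e7..." is the resulting call to Neutron's ports API. For reference, the same kind of call made standalone through openstacksdk might look like the sketch below; the cloud name is a placeholder, the network UUID is the tempest network this port lands on (visible in the network_info cache further down), and this is not the client path Nova itself uses internally.

import openstack

conn = openstack.connect(cloud='devstack')   # credentials come from clouds.yaml
port = conn.network.create_port(
    network_id='6215637a-f798-411f-9678-e79dbb77222f',
    description='standalone example, analogous to port 4ccaa9e7-... above',
)
print(port.id, port.mac_address,
      [ip['ip_address'] for ip in port.fixed_ips])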
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1629.954858] env[68437]: INFO nova.compute.manager [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Detaching volume 385465c8-d52f-4f5c-902a-a6b4468fd163 [ 1629.988863] env[68437]: INFO nova.virt.block_device [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Attempting to driver detach volume 385465c8-d52f-4f5c-902a-a6b4468fd163 from mountpoint /dev/sdc [ 1629.989115] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Volume detach. Driver type: vmdk {{(pid=68437) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1629.989305] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591185', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'name': 'volume-385465c8-d52f-4f5c-902a-a6b4468fd163', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'serial': '385465c8-d52f-4f5c-902a-a6b4468fd163'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1629.990231] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dfd2a2-3105-43bb-b5a2-02f22a161b53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.013371] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30e4cdd-6688-4883-807e-c67c15b950e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.019949] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3809cf5-0e37-4e7a-b93d-5e3f38a6fd88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.039656] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2dfec2c-6374-4a3c-8f1f-c71af6a9681e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.054064] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] The volume has not been displaced from its original location: [datastore1] volume-385465c8-d52f-4f5c-902a-a6b4468fd163/volume-385465c8-d52f-4f5c-902a-a6b4468fd163.vmdk. No consolidation needed. 
{{(pid=68437) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1630.059138] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfiguring VM instance instance-0000007c to detach disk 2002 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1630.059408] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-011c9486-85a9-4a14-86a6-4c86475583bb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.076857] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1630.076857] env[68437]: value = "task-2945358" [ 1630.076857] env[68437]: _type = "Task" [ 1630.076857] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.084279] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.586331] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945358, 'name': ReconfigVM_Task, 'duration_secs': 0.208888} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.586688] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Reconfigured VM instance instance-0000007c to detach disk 2002 {{(pid=68437) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1630.591221] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7acfffa-c88f-44ba-9c5e-f5415f894f36 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.606193] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1630.606193] env[68437]: value = "task-2945359" [ 1630.606193] env[68437]: _type = "Task" [ 1630.606193] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.614271] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1630.616027] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.640823] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1630.641087] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1630.641255] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1630.641463] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1630.641625] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1630.641821] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1630.642067] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1630.642248] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1630.642426] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1630.642606] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1630.642809] env[68437]: DEBUG nova.virt.hardware [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1630.643635] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca652aa-0264-4734-837b-3538dc5f45b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.651211] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5c6d69-6648-499b-b20b-4f2f2196b3ef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.967679] env[68437]: DEBUG nova.compute.manager [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1630.967934] env[68437]: DEBUG oslo_concurrency.lockutils [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1630.968088] env[68437]: DEBUG oslo_concurrency.lockutils [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1630.968263] env[68437]: DEBUG oslo_concurrency.lockutils [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1630.968431] env[68437]: 
DEBUG nova.compute.manager [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] No waiting events found dispatching network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1630.968595] env[68437]: WARNING nova.compute.manager [req-7f4c0780-bf1b-428a-b14f-53f546b7fdfc req-45516c1d-90e2-4c7a-9a7d-3ff96edad99d service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received unexpected event network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 for instance with vm_state building and task_state spawning. [ 1631.046515] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Successfully updated port: 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1631.115809] env[68437]: DEBUG oslo_vmware.api [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945359, 'name': ReconfigVM_Task, 'duration_secs': 0.132898} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.116129] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-591185', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'name': 'volume-385465c8-d52f-4f5c-902a-a6b4468fd163', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a5ce6701-f5d2-4eb7-9d6c-3ace121de308', 'attached_at': '', 'detached_at': '', 'volume_id': '385465c8-d52f-4f5c-902a-a6b4468fd163', 'serial': '385465c8-d52f-4f5c-902a-a6b4468fd163'} {{(pid=68437) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1631.549602] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.549851] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1631.549939] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1631.658245] env[68437]: DEBUG nova.objects.instance [None 
req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'flavor' on Instance uuid a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1632.083558] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance cache missing network info. {{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1632.204578] env[68437]: DEBUG nova.network.neutron [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.665286] env[68437]: DEBUG oslo_concurrency.lockutils [None req-8e065b4e-786c-4db8-bfd4-514400c7cbb3 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1632.707303] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1632.707559] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance network_info: |[{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1632.708048] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:46:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ccaa9e7-ec97-43d3-b9d6-609edc684273', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.715787] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1632.716619] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.716880] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c712a810-fb6a-4e09-b521-5f4d8f22e9cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.736382] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.736382] env[68437]: value = "task-2945360" [ 1632.736382] env[68437]: _type = "Task" [ 1632.736382] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.743601] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945360, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.993889] env[68437]: DEBUG nova.compute.manager [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1632.994095] env[68437]: DEBUG nova.compute.manager [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing instance network info cache due to event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1632.994360] env[68437]: DEBUG oslo_concurrency.lockutils [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.994548] env[68437]: DEBUG oslo_concurrency.lockutils [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1632.994730] env[68437]: DEBUG nova.network.neutron [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1633.245815] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945360, 'name': CreateVM_Task, 'duration_secs': 0.335716} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.247600] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1633.248381] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.248559] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1633.248923] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1633.249203] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef527340-6c98-4412-b633-71a5f7387e79 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.255078] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1633.255078] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d32456-bd54-4248-6839-88e746ce8f17" [ 1633.255078] env[68437]: _type = "Task" [ 1633.255078] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.263703] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d32456-bd54-4248-6839-88e746ce8f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.669065] env[68437]: DEBUG nova.network.neutron [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updated VIF entry in instance network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1633.669446] env[68437]: DEBUG nova.network.neutron [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.765427] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d32456-bd54-4248-6839-88e746ce8f17, 'name': SearchDatastore_Task, 'duration_secs': 0.01063} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.765670] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1633.765900] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.766147] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.766295] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1633.766474] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.766719] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ceec3c6-5de1-4f02-a66f-063107cd62d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.774728] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.774944] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1633.775580] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdc8532b-b5e6-4f40-8aac-669aa59bca5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.780338] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1633.780338] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52c168a5-df86-09f7-ba52-53095e8ff044" [ 1633.780338] env[68437]: _type = "Task" [ 1633.780338] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.787544] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c168a5-df86-09f7-ba52-53095e8ff044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.837384] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.837605] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1633.837812] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.837996] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1633.838174] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1633.840119] env[68437]: INFO nova.compute.manager [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Terminating instance [ 1634.171666] env[68437]: DEBUG oslo_concurrency.lockutils [req-7183efad-e6cd-4cc9-a800-d1b09769b297 req-2e29d89a-af55-4e75-956f-8be78d818e71 service nova] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1634.291068] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52c168a5-df86-09f7-ba52-53095e8ff044, 'name': SearchDatastore_Task, 'duration_secs': 0.008987} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.291858] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9738f059-ae9a-45b3-b953-812e763f85b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.296551] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1634.296551] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526263d0-51f2-b603-f269-1b307a88e9d5" [ 1634.296551] env[68437]: _type = "Task" [ 1634.296551] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.303402] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526263d0-51f2-b603-f269-1b307a88e9d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.343108] env[68437]: DEBUG nova.compute.manager [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Start destroying the instance on the hypervisor. 
{{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1634.343301] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1634.344025] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ad77e5-e62c-46a1-9c7e-f0c75c2ef0d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.350229] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1634.350437] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52d52b64-c0cb-4a28-946a-3a179b844793 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.356581] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1634.356581] env[68437]: value = "task-2945361" [ 1634.356581] env[68437]: _type = "Task" [ 1634.356581] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.363617] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.807214] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526263d0-51f2-b603-f269-1b307a88e9d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009522} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.807546] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1634.807705] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.807966] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d8250dd-1c73-4500-8223-6035f4f7c19e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.815030] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1634.815030] env[68437]: value = "task-2945362" [ 1634.815030] env[68437]: _type = "Task" [ 1634.815030] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.822125] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.864735] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945361, 'name': PowerOffVM_Task, 'duration_secs': 0.158256} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.864980] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1634.865175] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1634.865414] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45ff5986-aef1-4a21-8dc1-1ba8f0dea6bf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.930025] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1634.930273] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1634.930499] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleting the datastore file [datastore1] a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1634.930763] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-948d5f8a-81cd-4492-90ff-71004665e4ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.937544] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for the task: (returnval){ [ 1634.937544] env[68437]: value = "task-2945364" [ 1634.937544] env[68437]: _type = "Task" [ 1634.937544] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.944478] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945364, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.324494] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945362, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443577} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.324748] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.324945] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.325223] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fa99f5bd-7f9b-44a1-abc6-bce0fb904013 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.330896] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1635.330896] env[68437]: value = "task-2945365" [ 1635.330896] env[68437]: _type = "Task" [ 1635.330896] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.337796] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945365, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.424049] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.424297] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.424457] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.424596] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1635.446325] env[68437]: DEBUG oslo_vmware.api [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Task: {'id': task-2945364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.3739} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.446569] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.446748] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1635.446916] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1635.447100] env[68437]: INFO nova.compute.manager [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1635.447341] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1635.447531] env[68437]: DEBUG nova.compute.manager [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1635.447629] env[68437]: DEBUG nova.network.neutron [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1635.841147] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063663} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.841147] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.841605] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809b1977-04e6-4e23-b4b3-ab05f06ba443 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.863345] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.864840] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eb8cb42-29ff-4248-a794-43090ef670bd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.883850] env[68437]: DEBUG nova.compute.manager [req-78898487-c341-49af-a047-3dd45f673b5c req-f5ebf29b-e8a6-4cd5-b9aa-554060de43fb service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Received event network-vif-deleted-d9010969-e902-406d-85bd-7b15f516910f {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1635.884060] env[68437]: INFO nova.compute.manager [req-78898487-c341-49af-a047-3dd45f673b5c req-f5ebf29b-e8a6-4cd5-b9aa-554060de43fb service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Neutron deleted interface d9010969-e902-406d-85bd-7b15f516910f; detaching it from the instance and deleting it from the info cache [ 1635.884237] env[68437]: DEBUG nova.network.neutron [req-78898487-c341-49af-a047-3dd45f673b5c req-f5ebf29b-e8a6-4cd5-b9aa-554060de43fb service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.890482] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1635.890482] env[68437]: value = "task-2945366" [ 1635.890482] env[68437]: _type = "Task" [ 1635.890482] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.899312] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945366, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.382910] env[68437]: DEBUG nova.network.neutron [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.388264] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3084ea5f-db4f-45b8-98b7-30780aa30101 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.399311] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8c14d5-d167-4174-aeea-71fa4ccca76f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.412686] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945366, 'name': ReconfigVM_Task, 'duration_secs': 0.28869} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.413310] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.413945] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96f1eceb-c855-4a32-9cfe-96cf2ff895ae {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.420858] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1636.420858] env[68437]: value = "task-2945367" [ 1636.420858] env[68437]: _type = "Task" [ 1636.420858] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.429282] env[68437]: DEBUG nova.compute.manager [req-78898487-c341-49af-a047-3dd45f673b5c req-f5ebf29b-e8a6-4cd5-b9aa-554060de43fb service nova] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Detach interface failed, port_id=d9010969-e902-406d-85bd-7b15f516910f, reason: Instance a5ce6701-f5d2-4eb7-9d6c-3ace121de308 could not be found. 
{{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1636.433886] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945367, 'name': Rename_Task} progress is 6%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.885898] env[68437]: INFO nova.compute.manager [-] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Took 1.44 seconds to deallocate network for instance. [ 1636.930610] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945367, 'name': Rename_Task, 'duration_secs': 0.141673} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.930880] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1636.931117] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d1c90a7-7399-4954-9c61-4f3a5973db8d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.937500] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1636.937500] env[68437]: value = "task-2945368" [ 1636.937500] env[68437]: _type = "Task" [ 1636.937500] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.944604] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945368, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.227361] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.230984] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.392893] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1637.393126] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1637.393351] env[68437]: DEBUG nova.objects.instance [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lazy-loading 'resources' on Instance uuid a5ce6701-f5d2-4eb7-9d6c-3ace121de308 {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.447025] env[68437]: DEBUG oslo_vmware.api [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945368, 'name': PowerOnVM_Task, 'duration_secs': 0.47648} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.447305] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1637.447501] env[68437]: INFO nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Took 6.83 seconds to spawn the instance on the hypervisor. 
[ 1637.447679] env[68437]: DEBUG nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1637.448427] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87bf8e8-1d6d-465c-8313-c89b0db9eba7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.938200] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94442f6a-7b4f-413e-b780-e48b27160fc9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.948980] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c83fb33-d765-48ad-856a-cd9e501680b7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.982951] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7394db29-285d-42f7-bce7-38b6f966c2d5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.987529] env[68437]: INFO nova.compute.manager [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Took 11.52 seconds to build instance. [ 1637.991938] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83980a4-5bbd-4f74-bf83-daf2b1d73bd7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.005948] env[68437]: DEBUG nova.compute.provider_tree [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1638.230997] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.231266] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.231436] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1638.404329] env[68437]: DEBUG nova.compute.manager [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1638.404329] env[68437]: DEBUG nova.compute.manager [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing instance network info cache due to event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1638.404329] env[68437]: DEBUG oslo_concurrency.lockutils [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.404487] env[68437]: DEBUG oslo_concurrency.lockutils [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1638.404567] env[68437]: DEBUG nova.network.neutron [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1638.489160] env[68437]: DEBUG oslo_concurrency.lockutils [None req-c167cb3c-ab40-4df6-8d30-4c2ed2bc74c2 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.029s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1638.508840] env[68437]: DEBUG nova.scheduler.client.report [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1638.734961] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1639.013130] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1639.015970] env[68437]: DEBUG oslo_concurrency.lockutils 
[None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.282s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1639.016970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1639.017162] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1639.018135] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162a278f-ba24-402e-b816-93426e987b19 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.029651] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07509c82-6ea3-4b82-962b-0f0dfe23e298 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.034008] env[68437]: INFO nova.scheduler.client.report [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Deleted allocations for instance a5ce6701-f5d2-4eb7-9d6c-3ace121de308 [ 1639.047948] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc2eabc-137c-48b4-adbe-4a3a271d2229 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.054310] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bc9070-987a-4b84-92e6-ebcb67f1b1b8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.082456] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180894MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1639.082621] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1639.082872] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1639.145781] env[68437]: DEBUG nova.network.neutron [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] 
[instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updated VIF entry in instance network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1639.146151] env[68437]: DEBUG nova.network.neutron [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.551158] env[68437]: DEBUG oslo_concurrency.lockutils [None req-e9b9a549-ad85-42ab-a9d7-5b46869c9dd0 tempest-AttachVolumeTestJSON-1750218865 tempest-AttachVolumeTestJSON-1750218865-project-member] Lock "a5ce6701-f5d2-4eb7-9d6c-3ace121de308" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.713s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1639.648835] env[68437]: DEBUG oslo_concurrency.lockutils [req-573c1ff7-3723-491d-9bd4-6c3dc2ce2033 req-5852e53f-e8ca-4a00-a4d1-e0ffc94f830a service nova] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1640.110301] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 47ba3b7e-23dd-4967-9850-b99c1dca219e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1640.110612] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1640.110817] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1640.141467] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5187610c-7ec3-4310-ae20-c36277a10718 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.150789] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d3ed63-832b-4742-aec8-6b6e54fbfa43 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.182935] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195ae233-1080-4e19-a7da-7a5eb33418dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.190779] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0689ee-7c8b-460a-9c3d-6dbfc8bf95ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.204444] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.708078] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1641.213795] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1641.214197] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.131s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1674.976364] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1674.976853] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1674.976853] env[68437]: INFO nova.compute.manager [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Shelving [ 1675.987000] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.987472] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea2b2c21-54ed-4cf4-b321-1522bfd1dcc8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.995549] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1675.995549] env[68437]: value = "task-2945372" [ 1675.995549] env[68437]: _type = "Task" [ 1675.995549] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.003396] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945372, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.505348] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945372, 'name': PowerOffVM_Task, 'duration_secs': 0.201514} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.505658] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1676.506461] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb20c2c4-cc4c-4415-82a1-1c8cdb4f1eef {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.524195] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e50f55a-44ee-4d79-9c56-586a9ffe529c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.034159] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1677.034542] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e9c51fca-5de9-4333-a2ae-f1bd16f526e8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.042279] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1677.042279] env[68437]: value = "task-2945373" [ 1677.042279] env[68437]: _type = "Task" [ 1677.042279] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.050652] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945373, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.552640] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945373, 'name': CreateSnapshot_Task, 'duration_secs': 0.480066} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.552916] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1677.553705] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e58f54-98ee-435f-ac18-c4ae74665658 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.071315] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1678.071726] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f0e30499-1499-4635-92b1-5da9e867204b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.080411] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1678.080411] env[68437]: value = "task-2945374" [ 1678.080411] env[68437]: _type = "Task" [ 1678.080411] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.087823] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945374, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.590505] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945374, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.090952] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945374, 'name': CloneVM_Task, 'duration_secs': 0.940382} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.091382] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Created linked-clone VM from snapshot [ 1679.091949] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b93ecc-6946-4a9f-bd60-0d145e31ad12 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.100273] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Uploading image d20c2ebe-095f-44ce-b1ed-1751ce61ff88 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1679.120103] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1679.120103] env[68437]: value = "vm-591188" [ 1679.120103] env[68437]: _type = "VirtualMachine" [ 1679.120103] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1679.120371] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4d4ac00d-78d1-4ada-98dc-f0ade2d40708 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.126012] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease: (returnval){ [ 1679.126012] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd97c2-5571-e915-1aad-d6776377d322" [ 1679.126012] env[68437]: _type = "HttpNfcLease" [ 1679.126012] env[68437]: } obtained for exporting VM: (result){ [ 1679.126012] env[68437]: value = "vm-591188" [ 1679.126012] env[68437]: _type = "VirtualMachine" [ 1679.126012] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1679.126358] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the lease: (returnval){ [ 1679.126358] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd97c2-5571-e915-1aad-d6776377d322" [ 1679.126358] env[68437]: _type = "HttpNfcLease" [ 1679.126358] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1679.131740] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1679.131740] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd97c2-5571-e915-1aad-d6776377d322" [ 1679.131740] env[68437]: _type = "HttpNfcLease" [ 1679.131740] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1679.634815] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1679.634815] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd97c2-5571-e915-1aad-d6776377d322" [ 1679.634815] env[68437]: _type = "HttpNfcLease" [ 1679.634815] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1679.635179] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1679.635179] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52bd97c2-5571-e915-1aad-d6776377d322" [ 1679.635179] env[68437]: _type = "HttpNfcLease" [ 1679.635179] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1679.635827] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6006d390-5e58-405b-805d-a2d9c68aa8eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.642312] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1679.642483] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1679.729663] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6813799c-aee1-4bcf-a531-f1a8680eee67 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.918559] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1686.919490] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d73f10f-3661-4b3b-b733-3d037724a4db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.925766] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk is in state: ready. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1686.925934] env[68437]: ERROR oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk due to incomplete transfer. [ 1686.926166] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9a72811c-cdd0-4e9d-ba8f-e95fa288530c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.933687] env[68437]: DEBUG oslo_vmware.rw_handles [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249bf48-82a6-2822-e1a6-bbf7fcaa992a/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1686.933879] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Uploaded image d20c2ebe-095f-44ce-b1ed-1751ce61ff88 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1686.936225] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1686.936454] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-23d26854-6017-4534-aa2b-d6e101b96841 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.942316] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1686.942316] env[68437]: value = "task-2945376" [ 1686.942316] env[68437]: _type = "Task" [ 1686.942316] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.949774] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945376, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.451637] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945376, 'name': Destroy_Task, 'duration_secs': 0.344965} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.451890] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Destroyed the VM [ 1687.452145] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1687.452385] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-334fe1fc-e514-4f23-8e51-f47a8c296300 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.458674] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1687.458674] env[68437]: value = "task-2945377" [ 1687.458674] env[68437]: _type = "Task" [ 1687.458674] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.465513] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945377, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.969162] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945377, 'name': RemoveSnapshot_Task, 'duration_secs': 0.340727} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.969496] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1687.969728] env[68437]: DEBUG nova.compute.manager [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1687.970497] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69894d30-c7b1-46b1-a294-f15a8a81f183 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.481874] env[68437]: INFO nova.compute.manager [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Shelve offloading [ 1688.985781] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1688.986198] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-130051e5-cb96-442a-a93e-b175e994f578 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.993997] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1688.993997] env[68437]: value = "task-2945378" [ 1688.993997] env[68437]: _type = "Task" [ 1688.993997] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.001469] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.504383] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1689.504609] env[68437]: DEBUG nova.compute.manager [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1689.505373] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7f12d7-3408-4d50-94b8-073b3b2dc5b3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.510480] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.510647] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1689.510812] env[68437]: DEBUG nova.network.neutron [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1690.205214] env[68437]: DEBUG nova.network.neutron [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.707979] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1690.923898] env[68437]: DEBUG nova.compute.manager [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-vif-unplugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1690.924107] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1690.924326] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1690.924495] env[68437]: DEBUG oslo_concurrency.lockutils [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1690.924744] env[68437]: DEBUG nova.compute.manager [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] No waiting events found dispatching network-vif-unplugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1690.924952] env[68437]: WARNING nova.compute.manager [req-e1c07671-21a3-4291-9ada-5a61c1a4c395 req-bf65ee68-8060-4b6d-90d8-bc63529b4303 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received unexpected event network-vif-unplugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1691.019533] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1691.020475] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb18c11-5093-4678-910b-359466184c28 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.028151] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1691.028374] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0824231c-79f5-49dd-8ffe-e8270bd0854b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.095550] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1691.095815] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1691.095940] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleting the datastore file [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1691.096211] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2649000f-30aa-48a7-ad62-e407e30818b4 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.102965] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1691.102965] env[68437]: value = "task-2945380" [ 1691.102965] env[68437]: _type = "Task" [ 1691.102965] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.110341] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945380, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.612697] env[68437]: DEBUG oslo_vmware.api [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122129} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.613173] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1691.613173] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1691.613301] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1691.636944] env[68437]: INFO nova.scheduler.client.report [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted allocations for instance 47ba3b7e-23dd-4967-9850-b99c1dca219e [ 1692.141318] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1692.141547] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1692.141771] env[68437]: DEBUG nova.objects.instance [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'resources' on Instance uuid 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1692.645466] env[68437]: DEBUG nova.objects.instance [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'numa_topology' on Instance uuid 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1692.950297] env[68437]: DEBUG nova.compute.manager [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 
service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1692.950475] env[68437]: DEBUG nova.compute.manager [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing instance network info cache due to event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1692.950695] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.950837] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1692.951011] env[68437]: DEBUG nova.network.neutron [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1693.148040] env[68437]: DEBUG nova.objects.base [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Object Instance<47ba3b7e-23dd-4967-9850-b99c1dca219e> lazy-loaded attributes: resources,numa_topology {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1693.174191] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4da90d-e3dd-46c4-86ea-a345fe6022f8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.182239] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f650de-7d45-425a-ba4b-a5fc6efd509e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.218659] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64eefe37-2ad3-4e2f-be0f-9b7c695faefd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.225686] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9b455c-bd86-4d1c-b1b5-d47d955c7f89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.238416] env[68437]: DEBUG nova.compute.provider_tree [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1693.655422] env[68437]: DEBUG nova.network.neutron 
[req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updated VIF entry in instance network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1693.655796] env[68437]: DEBUG nova.network.neutron [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": null, "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1693.741945] env[68437]: DEBUG nova.scheduler.client.report [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1693.832235] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1694.158940] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2860d5b-02b1-4383-b7a2-c3ac1b2011b0 req-bb47564c-4198-4398-b9a1-ddcf01edb846 service nova] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1694.246219] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s 
{{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1694.754255] env[68437]: DEBUG oslo_concurrency.lockutils [None req-46646cbd-9971-48f4-8b60-32f93f035b66 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.777s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1694.754946] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.923s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1694.755146] env[68437]: INFO nova.compute.manager [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Unshelving [ 1695.213529] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.226231] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.729922] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.730168] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1695.777690] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1695.777982] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1695.778211] env[68437]: DEBUG nova.objects.instance [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'pci_requests' on Instance uuid 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1696.230863] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1696.282115] env[68437]: DEBUG nova.objects.instance [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'numa_topology' on Instance uuid 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1696.784803] env[68437]: INFO nova.compute.claims [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1697.225920] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.230557] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.821329] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ec6aaf-c336-4265-b9f2-7addba80f1a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.829126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba02539-126c-45b6-8b66-ffcb201235ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.857676] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5b672dda-80b9-4362-bfcd-ec3e5a625a93 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.864080] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f764e01-4032-4a42-89b8-7758c5afbe76 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.876493] env[68437]: DEBUG nova.compute.provider_tree [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1698.379257] env[68437]: DEBUG nova.scheduler.client.report [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1698.884981] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1698.913156] env[68437]: INFO nova.network.neutron [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating port 4ccaa9e7-ec97-43d3-b9d6-609edc684273 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1699.231164] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1699.231399] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.230984] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1700.290462] env[68437]: DEBUG nova.compute.manager [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event 
network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1700.290686] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1700.290876] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1700.291270] env[68437]: DEBUG oslo_concurrency.lockutils [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1700.291567] env[68437]: DEBUG nova.compute.manager [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] No waiting events found dispatching network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1700.291567] env[68437]: WARNING nova.compute.manager [req-b9fe10f8-c2a3-4d7c-bc85-c9dbba9c94da req-64cdb3bd-3b44-4fc7-b2bc-727ca297485c service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received unexpected event network-vif-plugged-4ccaa9e7-ec97-43d3-b9d6-609edc684273 for instance with vm_state shelved_offloaded and task_state spawning. 
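The entries above trace the unshelve claim: the ResourceTracker serializes instance_claim behind the process-local "compute_resources" lock, lazy-loads pci_requests and numa_topology, confirms that the placement inventory for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 is unchanged, releases the lock after 3.107s, updates the Neutron port binding to host cpu-1, and then receives a network-vif-plugged event that is logged as unexpected because no waiter is registered while the instance is still shelved_offloaded/spawning. The serialization is the oslo.concurrency pattern visible in every "Acquiring lock ... / acquired ... / released" triple. A minimal, illustrative sketch of that pattern follows; it is not Nova's actual ResourceTracker code, and the class and field names are made up for the example:

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


    class ToyResourceTracker(object):
        """Hypothetical stand-in used only to show the locking pattern."""

        def __init__(self):
            self.tracked = {}

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance_uuid, vcpus, memory_mb, root_gb):
            # Everything between the "acquired" and "released" log lines runs
            # while this semaphore is held, which is why the claim, the
            # provider-tree check and the release appear as one serialized
            # block per request ID.
            self.tracked[instance_uuid] = {'VCPU': vcpus,
                                           'MEMORY_MB': memory_mb,
                                           'DISK_GB': root_gb}
            return self.tracked[instance_uuid]

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def update_available_resource(self):
            # The periodic ComputeManager.update_available_resource task seen
            # in the log contends for the same lock name, so audits and claims
            # never interleave.
            return dict(self.tracked)

The "held 3.107s" reported at release is simply the wall-clock time spent inside the decorated method, which here includes the placement inventory round trip made while the lock was held.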
[ 1700.373698] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.373904] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1700.374166] env[68437]: DEBUG nova.network.neutron [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1700.734553] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1700.734798] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1700.734959] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1700.735224] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1700.736126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8646bbc-a46c-409e-99cb-c42f5a4b245d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.744206] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa62de8-5797-4da8-b844-2f6fa916c1ac {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.757657] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d9fa4f-a124-420d-b9cc-d131353ee179 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.763564] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d888345a-66ff-40be-b462-d0bbf905c829 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.790868] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180905MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1700.790998] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1700.791205] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1701.061548] env[68437]: DEBUG nova.network.neutron [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.563779] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='38c850412bb623bb43a2954b20560fda',container_format='bare',created_at=2025-03-11T18:54:46Z,direct_url=,disk_format='vmdk',id=d20c2ebe-095f-44ce-b1ed-1751ce61ff88,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1084383843-shelved',owner='aa08e0c3081143cbb8f4d00d7e5cf222',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-03-11T18:54:58Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1701.592026] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1701.592736] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1701.593142] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1701.593484] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1701.593802] env[68437]: 
DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1701.595474] env[68437]: DEBUG nova.virt.hardware [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1701.595474] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98b067c-a646-4fa4-95b0-3bac891f3938 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.602485] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21703e9e-3bc7-43d8-80d6-692b7029d28a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.615234] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:46:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ccaa9e7-ec97-43d3-b9d6-609edc684273', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1701.622416] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1701.622629] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1701.622816] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-25049462-8afc-4ccf-a591-3c8b782c7ac8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.641858] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1701.641858] env[68437]: value = "task-2945381" [ 1701.641858] env[68437]: _type = "Task" [ 1701.641858] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.648580] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945381, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.815047] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 47ba3b7e-23dd-4967-9850-b99c1dca219e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1701.815273] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1701.815419] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1701.840665] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be53746-7248-4c90-9c41-db6ceaa1d4d7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.848272] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e28c7bc-c659-403c-b049-a05eb044df2b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.877688] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00f7a31-215c-49e6-8895-3f9946fcf7fb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.885075] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f20c431-17f6-46c2-8896-2505ec3dc2d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.898366] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.152447] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945381, 'name': CreateVM_Task, 'duration_secs': 0.320828} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.152675] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1702.153277] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.153453] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1702.153878] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1702.154186] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32bef297-3242-4cfc-a81f-6d9295d5decd {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.159198] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1702.159198] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5288406e-419c-b7db-eb54-cd6538a629af" [ 1702.159198] env[68437]: _type = "Task" [ 1702.159198] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.166684] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]5288406e-419c-b7db-eb54-cd6538a629af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.314544] env[68437]: DEBUG nova.compute.manager [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1702.314772] env[68437]: DEBUG nova.compute.manager [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing instance network info cache due to event network-changed-4ccaa9e7-ec97-43d3-b9d6-609edc684273. 
{{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1702.314956] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] Acquiring lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.315171] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] Acquired lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1702.315351] env[68437]: DEBUG nova.network.neutron [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Refreshing network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1702.401467] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1702.668833] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1702.669229] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Processing image d20c2ebe-095f-44ce-b1ed-1751ce61ff88 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1702.669345] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.669475] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1702.669652] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1702.669945] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5c01890-82db-4bb7-9a87-2585dbecd0a3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.677545] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1702.677716] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1702.678401] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2cbd1f-0a1d-422d-827c-e5d7e40a9262 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.683267] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1702.683267] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]521a1bfc-ab45-e67e-bf86-aa3e6dd6299e" [ 1702.683267] env[68437]: _type = "Task" [ 1702.683267] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.689923] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]521a1bfc-ab45-e67e-bf86-aa3e6dd6299e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.905617] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1702.905830] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1703.007885] env[68437]: DEBUG nova.network.neutron [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updated VIF entry in instance network info cache for port 4ccaa9e7-ec97-43d3-b9d6-609edc684273. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1703.008254] env[68437]: DEBUG nova.network.neutron [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [{"id": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "address": "fa:16:3e:12:46:eb", "network": {"id": "6215637a-f798-411f-9678-e79dbb77222f", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-686283147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa08e0c3081143cbb8f4d00d7e5cf222", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ccaa9e7-ec", "ovs_interfaceid": "4ccaa9e7-ec97-43d3-b9d6-609edc684273", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.193030] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1703.193316] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Fetch image to [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7/OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1703.193505] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Downloading stream optimized image d20c2ebe-095f-44ce-b1ed-1751ce61ff88 to [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7/OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7.vmdk on the data store datastore1 as vApp {{(pid=68437) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1703.193678] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Downloading image file data d20c2ebe-095f-44ce-b1ed-1751ce61ff88 to the ESX as VM named 'OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7' {{(pid=68437) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1703.258014] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1703.258014] env[68437]: value = "resgroup-9" [ 1703.258014] env[68437]: _type = "ResourcePool" [ 1703.258014] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1703.258294] env[68437]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-ed82efc9-9e05-4bc3-b08d-d031dce805cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.279760] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease: (returnval){ [ 1703.279760] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265f8b0-47da-c782-9e2c-5dfcda7ded69" [ 1703.279760] env[68437]: _type = "HttpNfcLease" [ 1703.279760] env[68437]: } obtained for vApp import into resource pool (val){ [ 1703.279760] env[68437]: value = "resgroup-9" [ 1703.279760] env[68437]: _type = "ResourcePool" [ 1703.279760] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1703.280201] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the lease: (returnval){ [ 1703.280201] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265f8b0-47da-c782-9e2c-5dfcda7ded69" [ 1703.280201] env[68437]: _type = "HttpNfcLease" [ 1703.280201] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1703.285706] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1703.285706] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265f8b0-47da-c782-9e2c-5dfcda7ded69" [ 1703.285706] env[68437]: _type = "HttpNfcLease" [ 1703.285706] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1703.511401] env[68437]: DEBUG oslo_concurrency.lockutils [req-8b40fef6-3172-43ac-81df-27b48b4503ee req-37bab54d-4e1e-4bd2-a0ae-3fd9fa008a84 service nova] Releasing lock "refresh_cache-47ba3b7e-23dd-4967-9850-b99c1dca219e" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1703.788965] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1703.788965] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265f8b0-47da-c782-9e2c-5dfcda7ded69" [ 1703.788965] env[68437]: _type = "HttpNfcLease" [ 1703.788965] env[68437]: } is ready. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1703.789336] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1703.789336] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]5265f8b0-47da-c782-9e2c-5dfcda7ded69" [ 1703.789336] env[68437]: _type = "HttpNfcLease" [ 1703.789336] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1703.789998] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186129e0-db67-4ef2-8a5c-f7ef014c1131 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.796608] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1703.796778] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk. {{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1703.859138] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c04eb532-e624-4d8c-9c84-10566c63cb4b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.934240] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1704.934704] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1704.935420] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8f4ecd-5f48-4bf1-9bab-0ef03070eac5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.941690] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk is in state: ready. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1704.941874] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1704.942104] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-44a8b440-5870-4e39-a3a0-e0d8106280f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.127752] env[68437]: DEBUG oslo_vmware.rw_handles [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5344b-1e58-8ff9-a843-25f65d4bb80f/disk-0.vmdk. 
{{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1705.127934] env[68437]: INFO nova.virt.vmwareapi.images [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Downloaded image file data d20c2ebe-095f-44ce-b1ed-1751ce61ff88 [ 1705.128792] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a5401a-80f9-4c45-b9a5-32fb769af59e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.144372] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8fe0950e-23ae-4747-9064-80994e5f9831 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.184233] env[68437]: INFO nova.virt.vmwareapi.images [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] The imported VM was unregistered [ 1705.186663] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1705.186894] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1705.187162] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f910d326-715e-45c1-a3d3-24b9246c37a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.197841] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Created directory with path [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1705.198017] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7/OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7.vmdk to [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk. 
{{(pid=68437) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1705.198262] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-398c0b87-dbad-4efb-b5c7-63a2c261b39c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.205373] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1705.205373] env[68437]: value = "task-2945384" [ 1705.205373] env[68437]: _type = "Task" [ 1705.205373] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.212473] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.715192] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.215875] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.717338] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.217519] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.719078] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945384, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.176588} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.719698] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7/OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7.vmdk to [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk. 
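Everything from the HttpNfcLease creation through the disk move above follows one mechanic: issue a vCenter task (ImportVApp, CreateVM_Task, MoveVirtualDisk_Task, and later CopyVirtualDisk_Task), then let the wait_for_task/_poll_task loop in oslo_vmware/api.py (lines 397/434 in the entries) re-read the task object until it reports success, emitting a "progress is N%" line once per poll. A simplified, self-contained sketch of that polling loop follows; get_task_info is a hypothetical callable standing in for the PropertyCollector reads, not the real oslo.vmware API:

    import time


    class TaskFailed(Exception):
        """Raised when the polled task ends in the 'error' state."""


    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it finishes.

        get_task_info(task_ref) must return an object with .state in
        ('queued', 'running', 'success', 'error'), an integer .progress and
        an optional .error -- a stand-in for the RetrievePropertiesEx reads
        the log shows before each progress line.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(getattr(info, 'error', 'task failed'))
            # Corresponds to the recurring "Task: {...} progress is N%." lines.
            print("Task %s progress is %s%%." % (task_ref, info.progress))
            time.sleep(poll_interval)

The roughly half-second spacing of the MoveVirtualDisk_Task progress lines above (0%, 24%, 46%, 71%, 94% over about 2.2 seconds) is this poll interval showing through in the log.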
[ 1707.719698] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Cleaning up location [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1707.719698] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9d4c13bc-f853-4783-b28c-a7634a987be7 {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1707.719946] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e23f74ad-5ca3-42f3-b11e-c98c70db36b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.725636] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1707.725636] env[68437]: value = "task-2945385" [ 1707.725636] env[68437]: _type = "Task" [ 1707.725636] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.732984] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.235124] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036156} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.235441] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1708.235552] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1708.235830] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk to [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1708.236093] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-526b2b28-ef24-4cf3-9f5a-0a0cc4ae1074 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.242105] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1708.242105] env[68437]: value = "task-2945386" [ 1708.242105] env[68437]: _type = "Task" [ 1708.242105] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.248778] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.752853] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.253432] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.755607] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.256058] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.757483] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945386, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.116819} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.757756] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d20c2ebe-095f-44ce-b1ed-1751ce61ff88/d20c2ebe-095f-44ce-b1ed-1751ce61ff88.vmdk to [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1710.758522] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1785b4e2-72ee-48c3-979b-792b57345807 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.780220] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1710.780421] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ef70f59-deb7-4adf-97d9-f35bf90927d0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.799331] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1710.799331] env[68437]: value = "task-2945387" [ 1710.799331] env[68437]: _type = "Task" [ 1710.799331] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.806214] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945387, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.309515] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945387, 'name': ReconfigVM_Task, 'duration_secs': 0.278773} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.309962] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e/47ba3b7e-23dd-4967-9850-b99c1dca219e.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1711.310419] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-292fa2fb-a1d2-4222-b7e1-f435ef846a57 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.316356] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1711.316356] env[68437]: value = "task-2945388" [ 1711.316356] env[68437]: _type = "Task" [ 1711.316356] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.323523] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945388, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.826014] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945388, 'name': Rename_Task, 'duration_secs': 0.143426} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.826293] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1711.826528] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f716714-6a7d-4fea-8c17-95fc4376dd54 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.832591] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1711.832591] env[68437]: value = "task-2945389" [ 1711.832591] env[68437]: _type = "Task" [ 1711.832591] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.839571] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.342828] env[68437]: DEBUG oslo_vmware.api [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945389, 'name': PowerOnVM_Task, 'duration_secs': 0.420623} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.343157] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1712.430359] env[68437]: DEBUG nova.compute.manager [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1712.431224] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8113063-0725-4ad7-ad7c-b8e60a552a33 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.949849] env[68437]: DEBUG oslo_concurrency.lockutils [None req-4ae00847-f01b-4165-b5f3-93977847df51 tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.195s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1713.265960] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1713.266250] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1713.266462] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1713.266645] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1713.266810] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1713.268830] env[68437]: INFO nova.compute.manager [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Terminating instance [ 1713.772999] env[68437]: DEBUG nova.compute.manager [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1713.773552] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1713.774209] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fdf60e-6d12-4acb-8b72-cbc4ac9f1846 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.782665] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.782945] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c20141a0-037b-408f-8324-f77c0082cdfc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.789062] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1713.789062] env[68437]: value = "task-2945390" [ 1713.789062] env[68437]: _type = "Task" [ 1713.789062] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.796783] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945390, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.299136] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945390, 'name': PowerOffVM_Task, 'duration_secs': 0.171456} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.299426] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1714.299598] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1714.299846] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41288b78-bd2f-4046-894f-b1cb89f8a4e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.364178] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1714.364449] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1714.364655] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleting the datastore file [datastore1] 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.364912] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12a58c78-93f1-4495-a8c3-a89e0ac32611 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.372619] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for the task: (returnval){ [ 1714.372619] env[68437]: value = "task-2945392" [ 1714.372619] 
env[68437]: _type = "Task" [ 1714.372619] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.380327] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945392, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.881993] env[68437]: DEBUG oslo_vmware.api [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Task: {'id': task-2945392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131701} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.882397] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.882441] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.882619] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.882795] env[68437]: INFO nova.compute.manager [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1714.883046] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1714.883242] env[68437]: DEBUG nova.compute.manager [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1714.883396] env[68437]: DEBUG nova.network.neutron [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1715.306420] env[68437]: DEBUG nova.compute.manager [req-03f75216-65e3-4198-8919-75e8078b4c3b req-4d869e5e-e3c7-452d-b488-1ea334aa762e service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Received event network-vif-deleted-4ccaa9e7-ec97-43d3-b9d6-609edc684273 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1715.306705] env[68437]: INFO nova.compute.manager [req-03f75216-65e3-4198-8919-75e8078b4c3b req-4d869e5e-e3c7-452d-b488-1ea334aa762e service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Neutron deleted interface 4ccaa9e7-ec97-43d3-b9d6-609edc684273; detaching it from the instance and deleting it from the info cache [ 1715.306832] env[68437]: DEBUG nova.network.neutron [req-03f75216-65e3-4198-8919-75e8078b4c3b req-4d869e5e-e3c7-452d-b488-1ea334aa762e service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.791491] env[68437]: DEBUG nova.network.neutron [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.809779] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38561249-7cf7-43e4-8276-704f7f9d5ae3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.819897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b984a75-4e0d-4da5-8e4d-b5f098f64205 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.845356] env[68437]: DEBUG nova.compute.manager [req-03f75216-65e3-4198-8919-75e8078b4c3b req-4d869e5e-e3c7-452d-b488-1ea334aa762e service nova] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Detach interface failed, port_id=4ccaa9e7-ec97-43d3-b9d6-609edc684273, reason: Instance 47ba3b7e-23dd-4967-9850-b99c1dca219e could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1716.294105] env[68437]: INFO nova.compute.manager [-] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Took 1.41 seconds to deallocate network for instance. 
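The unshelve/terminate sequence above (PowerOnVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is driven through oslo.vmware's session layer, which is where the repeated wait_for_task / _poll_task entries come from. As a minimal illustrative sketch only: the snippet below shows that invoke_api / wait_for_task polling pattern against a vCenter; the endpoint, credentials, poll interval and managed-object value are placeholder assumptions, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern seen above.
    # Host, credentials, intervals and the moref value are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test',               # assumed vCenter host
        'administrator@vsphere.local',   # assumed user
        'secret',                        # assumed password
        api_retry_count=10,
        task_poll_interval=0.5)          # roughly the cadence of the progress lines

    # Build a managed-object reference for a VM, start a task, then block on it.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task_ref)      # polls task progress and raises on error
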
[ 1716.802048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1716.802048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1716.802048] env[68437]: DEBUG nova.objects.instance [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lazy-loading 'resources' on Instance uuid 47ba3b7e-23dd-4967-9850-b99c1dca219e {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1717.335656] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b76f4a0-d9c8-4381-bfc8-db85537b2c53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.343076] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cecedac-eabb-4fd6-b2ac-424750b7e887 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.373288] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c18bf3-9c71-4aba-b310-28ebcc180468 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.379967] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb5561b-a64f-414b-9223-517038f501b1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.393811] env[68437]: DEBUG nova.compute.provider_tree [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.897419] env[68437]: DEBUG nova.scheduler.client.report [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1718.403244] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd 
tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.602s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1718.429224] env[68437]: INFO nova.scheduler.client.report [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Deleted allocations for instance 47ba3b7e-23dd-4967-9850-b99c1dca219e [ 1718.938434] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b31e0f83-6802-4bac-af1f-6f17ca3795bd tempest-ServerActionsTestOtherB-2016325214 tempest-ServerActionsTestOtherB-2016325214-project-member] Lock "47ba3b7e-23dd-4967-9850-b99c1dca219e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.672s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1727.031048] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1727.031337] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1727.535975] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Starting instance... 
{{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1728.057718] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1728.057987] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1728.059463] env[68437]: INFO nova.compute.claims [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1729.093502] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d600fdea-adef-45f6-81fc-87cb302c1d32 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.101284] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab58054-9eb6-401d-8eb0-46de60612e25 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.130368] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da269efc-9428-49f8-a684-08f9f3433919 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.136795] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92fcdee-803a-404f-b958-d72984f6322f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.149231] env[68437]: DEBUG nova.compute.provider_tree [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.652502] env[68437]: DEBUG nova.scheduler.client.report [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1730.157502] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.099s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1730.158080] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1730.663058] env[68437]: DEBUG nova.compute.utils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1730.664204] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Allocating IP information in the background. {{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1730.664380] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1730.701683] env[68437]: DEBUG nova.policy [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39db84305e4a46249b78a3b0ed4c45c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d933df479af476ca27b9a4bfe6644c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1730.954548] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Successfully created port: 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1731.169392] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Start building block device mappings for instance. 
{{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1732.179815] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Start spawning the instance on the hypervisor. {{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1732.207576] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1732.207775] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1732.207934] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1732.208130] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1732.208276] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1732.208425] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1732.208635] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1732.208962] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1732.209157] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1732.209366] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1732.209494] env[68437]: DEBUG nova.virt.hardware [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1732.210376] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af933bea-1a9e-49b1-880d-ca622ea4f3ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.218452] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906e55dc-3e94-4a85-9599-cc64290911b0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.380786] env[68437]: DEBUG nova.compute.manager [req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1732.381044] env[68437]: DEBUG oslo_concurrency.lockutils [req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1732.381260] env[68437]: DEBUG oslo_concurrency.lockutils [req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1732.381417] env[68437]: DEBUG oslo_concurrency.lockutils [req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1732.381583] env[68437]: DEBUG nova.compute.manager 
[req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] No waiting events found dispatching network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1732.381745] env[68437]: WARNING nova.compute.manager [req-e68f58c2-2cde-4bb5-960f-1fc1ca2d94f4 req-c8a4e0e5-b3a5-49f0-8257-9e15560be723 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received unexpected event network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 for instance with vm_state building and task_state spawning. [ 1732.877730] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Successfully updated port: 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1732.898241] env[68437]: DEBUG nova.compute.manager [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1732.898454] env[68437]: DEBUG nova.compute.manager [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing instance network info cache due to event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1732.898668] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.898792] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1732.898953] env[68437]: DEBUG nova.network.neutron [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing network info cache for port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1733.380658] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.432756] env[68437]: DEBUG nova.network.neutron [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1733.504476] env[68437]: DEBUG nova.network.neutron [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.006835] env[68437]: DEBUG oslo_concurrency.lockutils [req-0a04dfbc-cfe7-49a1-8cd2-539f38be29a8 req-ec86b242-ad81-4396-9d59-839c1fe59bb5 service nova] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1734.007247] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1734.007411] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1734.536983] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1734.651306] env[68437]: DEBUG nova.network.neutron [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.153749] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1735.154090] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance network_info: |[{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1735.154528] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:73:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1735.161948] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating folder: Project (6d933df479af476ca27b9a4bfe6644c7). Parent ref: group-v590848. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1735.162243] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e54c0682-febc-49b8-9e21-567bbc539125 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.174862] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Created folder: Project (6d933df479af476ca27b9a4bfe6644c7) in parent group-v590848. [ 1735.175103] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating folder: Instances. Parent ref: group-v591191. {{(pid=68437) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1735.175308] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4538885-f68b-42a7-b38e-9d877e861272 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.184102] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Created folder: Instances in parent group-v591191. [ 1735.184318] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1735.184490] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1735.184669] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f0d74331-fe20-48b1-a538-02c8b9ce6381 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.202962] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1735.202962] env[68437]: value = "task-2945396" [ 1735.202962] env[68437]: _type = "Task" [ 1735.202962] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.210275] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945396, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.713271] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945396, 'name': CreateVM_Task, 'duration_secs': 0.30483} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.713672] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1735.714334] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.714542] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1735.714913] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1735.715236] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc09c53a-52bd-4048-bacc-f35fc4f835d2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.719663] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1735.719663] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52eafb26-117d-43df-c378-ff82c2d5f257" [ 1735.719663] env[68437]: _type = "Task" [ 1735.719663] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.727374] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52eafb26-117d-43df-c378-ff82c2d5f257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.230420] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52eafb26-117d-43df-c378-ff82c2d5f257, 'name': SearchDatastore_Task, 'duration_secs': 0.008579} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.230730] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1736.230968] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1736.231226] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.231422] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1736.231562] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1736.231812] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cd33df6-9dc0-4823-a874-657e01eef559 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.239745] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1736.239912] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1736.240635] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84a08fed-59cb-46f1-beff-4b598b1d8571 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.245228] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1736.245228] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]528ac81b-6b8a-e42d-9f4e-914ac0a9926e" [ 1736.245228] env[68437]: _type = "Task" [ 1736.245228] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.252447] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528ac81b-6b8a-e42d-9f4e-914ac0a9926e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.755583] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]528ac81b-6b8a-e42d-9f4e-914ac0a9926e, 'name': SearchDatastore_Task, 'duration_secs': 0.009346} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.756389] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e879a0ba-9c55-40cc-98a4-8540609b285c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.761141] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1736.761141] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]526ab36d-04ed-0493-5ccc-9231a438e2a0" [ 1736.761141] env[68437]: _type = "Task" [ 1736.761141] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.768336] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526ab36d-04ed-0493-5ccc-9231a438e2a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.271947] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]526ab36d-04ed-0493-5ccc-9231a438e2a0, 'name': SearchDatastore_Task, 'duration_secs': 0.011605} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.272182] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1737.272439] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1737.272682] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7f479be-a9e2-4f34-9b89-1041e4e3f40e {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.279340] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1737.279340] env[68437]: value = "task-2945397" [ 1737.279340] env[68437]: _type = "Task" [ 1737.279340] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.286820] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.789084] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460162} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.789534] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1737.789534] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1737.789751] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d09442e-108f-4fb0-b82e-547a8101ae9a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.796401] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1737.796401] env[68437]: value = "task-2945398" [ 1737.796401] env[68437]: _type = "Task" [ 1737.796401] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.803491] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.306014] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070915} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.306375] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.307146] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60efc51a-1732-41ca-bc02-2b1e68d1d106 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.329365] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.329593] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09994a33-2022-4436-b417-dd8e88381ecf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.347627] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1738.347627] env[68437]: value = "task-2945399" [ 1738.347627] env[68437]: _type = "Task" [ 1738.347627] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.354862] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945399, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.856800] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945399, 'name': ReconfigVM_Task, 'duration_secs': 0.439404} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.857247] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1738.857735] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85674f95-4cde-47de-936b-bc1ef0c6cc88 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.863654] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1738.863654] env[68437]: value = "task-2945400" [ 1738.863654] env[68437]: _type = "Task" [ 1738.863654] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.870816] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945400, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.373321] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945400, 'name': Rename_Task, 'duration_secs': 0.142426} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.373567] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1739.373805] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-584f9662-9c84-4ce4-ae91-7161243bd954 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.380644] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1739.380644] env[68437]: value = "task-2945401" [ 1739.380644] env[68437]: _type = "Task" [ 1739.380644] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.387496] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945401, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.890029] env[68437]: DEBUG oslo_vmware.api [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945401, 'name': PowerOnVM_Task, 'duration_secs': 0.460998} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.890459] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1739.890459] env[68437]: INFO nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Took 7.71 seconds to spawn the instance on the hypervisor. [ 1739.890624] env[68437]: DEBUG nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1739.891396] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd2e0a7-0e82-4cea-a5aa-f1b77289f18b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.410665] env[68437]: INFO nova.compute.manager [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Took 12.37 seconds to build instance. 
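The run above for instance 33375d13-870c-449a-8a42-9ad4b1a24f4b is the vmwareapi driver's normal spawn path: locate the cached image VMDK with SearchDatastore_Task, copy it into the instance directory with CopyVirtualDisk_Task, extend the root disk, reconfigure the VM to attach it, rename the VM, and power it on. Every one of those vSphere methods ends in *_Task and returns a task reference immediately; the repeated "Waiting for the task ... / progress is 0% ... completed successfully" entries are oslo.vmware polling TaskInfo roughly every half second. The snippet below is a minimal sketch of that invoke-and-poll pattern written directly against oslo.vmware, not Nova's own code; the vCenter endpoint, credentials, datacenter lookup and datastore paths are placeholders.

# Sketch only: illustrates the *_Task + wait_for_task pattern seen in this log.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test',            # placeholder vCenter endpoint
    'user', 'secret',             # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)       # ~0.5 s matches the spacing of the poll lines

vim = session.vim
# Pick the first Datacenter; a real driver resolves this from its cluster config.
dc_ref = session.invoke_api(vim_util, 'get_objects', vim,
                            'Datacenter', 1).objects[0].obj
disk_mgr = vim.service_content.virtualDiskManager

# CopyVirtualDisk_Task returns a task moref immediately, like every *_Task call.
copy_task = session.invoke_api(
    vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/IMAGE/IMAGE.vmdk',
    sourceDatacenter=dc_ref,
    destName='[datastore1] INSTANCE/INSTANCE.vmdk',
    destDatacenter=dc_ref)

# wait_for_task() is what emits the "_poll_task ... progress is n%" DEBUG lines;
# it returns the TaskInfo once the task reaches 'success' and raises on 'error'.
task_info = session.wait_for_task(copy_task)
print(task_info.state)

Nova wraps the same pattern in its vm_util helpers (copy_virtual_disk, power_on_instance, and so on), which is why each step in the log appears as an "Invoking <something>_Task" entry followed by a wait_for_task/_poll_task pair.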
[ 1740.912691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-5d8a285b-a879-4f04-8a1c-11cc16bc0151 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.881s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1741.468023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1741.468023] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1741.970161] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Starting instance... {{(pid=68437) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1742.492903] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1742.493242] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1742.494755] env[68437]: INFO nova.compute.claims [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.538472] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1a6eb2-f9cb-42df-88dc-1aa46ed56a95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.547100] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937348a7-82bc-4eb7-8c83-9e7dbdac97a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.575987] env[68437]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca725d6-b7bb-42c3-bd60-64f6deeff53a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.583047] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68470ecc-f946-49dd-8ed5-31cd1673dfe0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.595134] env[68437]: DEBUG nova.compute.provider_tree [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1744.098326] env[68437]: DEBUG nova.scheduler.client.report [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1744.603726] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1744.604240] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Start building networks asynchronously for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1745.109367] env[68437]: DEBUG nova.compute.utils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Using /dev/sd instead of None {{(pid=68437) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1745.110769] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Allocating IP information in the background. 
{{(pid=68437) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1745.110942] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] allocate_for_instance() {{(pid=68437) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1206}} [ 1745.157976] env[68437]: DEBUG nova.policy [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39db84305e4a46249b78a3b0ed4c45c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d933df479af476ca27b9a4bfe6644c7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68437) authorize /opt/stack/nova/nova/policy.py:192}} [ 1745.588808] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Successfully created port: 0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1745.614590] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Start building block device mappings for instance. {{(pid=68437) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1746.626823] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Start spawning the instance on the hypervisor. 
{{(pid=68437) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1746.652893] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-11T18:35:44Z,direct_url=,disk_format='vmdk',id=a272f526-6b8d-4a29-bd06-cd29ab5fabbe,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a61f759776444b78ab0e8a39df9260fa',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-11T18:35:45Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1746.653161] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1746.653327] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1746.653550] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1746.653704] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1746.653855] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1746.654079] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1746.654244] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1746.654412] env[68437]: DEBUG nova.virt.hardware [None 
req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1746.654574] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1746.654744] env[68437]: DEBUG nova.virt.hardware [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1746.655662] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3e40ae-62fe-47fe-952e-9a0c68a539f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.663556] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4331feb6-45b3-4970-9cf9-13a600b8b87a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.902380] env[68437]: DEBUG nova.compute.manager [req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Received event network-vif-plugged-0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1746.902609] env[68437]: DEBUG oslo_concurrency.lockutils [req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] Acquiring lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1746.902821] env[68437]: DEBUG oslo_concurrency.lockutils [req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1746.902989] env[68437]: DEBUG oslo_concurrency.lockutils [req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1746.903171] env[68437]: DEBUG nova.compute.manager [req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] No waiting events found dispatching network-vif-plugged-0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1746.903340] env[68437]: WARNING nova.compute.manager 
[req-0c877161-7df0-41a8-aadf-981ba9e4fe18 req-9ab298cc-2e3e-46f6-8e43-c32814771a23 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Received unexpected event network-vif-plugged-0f5a89ed-2c49-42f1-bc80-ff5d77662264 for instance with vm_state building and task_state spawning. [ 1747.461371] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Successfully updated port: 0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1747.480776] env[68437]: DEBUG nova.compute.manager [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Received event network-changed-0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1747.481023] env[68437]: DEBUG nova.compute.manager [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Refreshing instance network info cache due to event network-changed-0f5a89ed-2c49-42f1-bc80-ff5d77662264. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1747.481250] env[68437]: DEBUG oslo_concurrency.lockutils [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] Acquiring lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.481386] env[68437]: DEBUG oslo_concurrency.lockutils [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] Acquired lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1747.481530] env[68437]: DEBUG nova.network.neutron [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Refreshing network info cache for port 0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1747.963381] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1748.012400] env[68437]: DEBUG nova.network.neutron [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1748.077948] env[68437]: DEBUG nova.network.neutron [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.580359] env[68437]: DEBUG oslo_concurrency.lockutils [req-41b773b3-b16d-4fd3-b0fb-81a4f3ec1886 req-d5e50130-f315-4188-9a9e-ab60ca8ee233 service nova] Releasing lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1748.580684] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1748.580844] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1749.111690] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance cache missing network info. 
{{(pid=68437) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3381}} [ 1749.230276] env[68437]: DEBUG nova.network.neutron [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Updating instance_info_cache with network_info: [{"id": "0f5a89ed-2c49-42f1-bc80-ff5d77662264", "address": "fa:16:3e:12:f0:6f", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f5a89ed-2c", "ovs_interfaceid": "0f5a89ed-2c49-42f1-bc80-ff5d77662264", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.733155] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "refresh_cache-2cb7f89c-1d21-4577-8131-b93961b7ab1d" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1749.733507] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance network_info: |[{"id": "0f5a89ed-2c49-42f1-bc80-ff5d77662264", "address": "fa:16:3e:12:f0:6f", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f5a89ed-2c", "ovs_interfaceid": "0f5a89ed-2c49-42f1-bc80-ff5d77662264", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68437) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1749.733947] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:f0:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f5a89ed-2c49-42f1-bc80-ff5d77662264', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1749.741266] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1749.741485] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1749.741709] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d833b1e5-cc21-4f66-ac80-df5d451e3cd1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.762249] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1749.762249] env[68437]: value = "task-2945402" [ 1749.762249] env[68437]: _type = "Task" [ 1749.762249] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.769565] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945402, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.272341] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945402, 'name': CreateVM_Task, 'duration_secs': 0.297692} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.272734] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1750.273196] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.273379] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1750.273680] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1750.273945] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1206c87-ffb5-498f-b705-47eae7a3b932 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.278887] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1750.278887] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52840f54-dd06-a75c-2a84-425166a10597" [ 1750.278887] env[68437]: _type = "Task" [ 1750.278887] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.287024] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52840f54-dd06-a75c-2a84-425166a10597, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.789056] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52840f54-dd06-a75c-2a84-425166a10597, 'name': SearchDatastore_Task, 'duration_secs': 0.013019} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.790029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1750.790029] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Processing image a272f526-6b8d-4a29-bd06-cd29ab5fabbe {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1750.790029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.790029] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1750.790252] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1750.790423] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4b1b2cf-b327-4e46-8a34-ce6161c320f7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.799058] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1750.799239] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1750.800125] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebf37f52-a87d-4677-a1a4-e5f61a85c61f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.805473] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1750.805473] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52de29b9-b879-c24b-d710-2d0ad30a9b45" [ 1750.805473] env[68437]: _type = "Task" [ 1750.805473] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.812930] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52de29b9-b879-c24b-d710-2d0ad30a9b45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.315721] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52de29b9-b879-c24b-d710-2d0ad30a9b45, 'name': SearchDatastore_Task, 'duration_secs': 0.009455} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.316468] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0484c8d-6418-40f0-afa4-8e85bf1e7883 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.321117] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1751.321117] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52af9f76-2624-3f13-0542-21d02ef40a5b" [ 1751.321117] env[68437]: _type = "Task" [ 1751.321117] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.327989] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52af9f76-2624-3f13-0542-21d02ef40a5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.831787] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52af9f76-2624-3f13-0542-21d02ef40a5b, 'name': SearchDatastore_Task, 'duration_secs': 0.010327} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.832018] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1751.832273] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2cb7f89c-1d21-4577-8131-b93961b7ab1d/2cb7f89c-1d21-4577-8131-b93961b7ab1d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1751.832520] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45849e79-73b9-4b5f-ad53-47be6260a057 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.838685] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1751.838685] env[68437]: value = "task-2945403" [ 1751.838685] env[68437]: _type = "Task" [ 1751.838685] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.845593] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.347840] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.425001} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.348191] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a272f526-6b8d-4a29-bd06-cd29ab5fabbe/a272f526-6b8d-4a29-bd06-cd29ab5fabbe.vmdk to [datastore1] 2cb7f89c-1d21-4577-8131-b93961b7ab1d/2cb7f89c-1d21-4577-8131-b93961b7ab1d.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1752.348374] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Extending root virtual disk to 1048576 {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1752.348615] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77f8faeb-73e1-47fc-bba2-0f6d71b8a76b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.355170] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1752.355170] env[68437]: value = "task-2945404" [ 1752.355170] env[68437]: _type = "Task" [ 1752.355170] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.362431] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.864155] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065328} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.864431] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Extended root virtual disk {{(pid=68437) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1752.865170] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36481bb-0819-49c8-aa22-4d0b3444a2eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.885654] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 2cb7f89c-1d21-4577-8131-b93961b7ab1d/2cb7f89c-1d21-4577-8131-b93961b7ab1d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1752.885862] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8063d23d-c37e-4bf1-8f5e-a2092a4840a2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.904614] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1752.904614] env[68437]: value = "task-2945405" [ 1752.904614] env[68437]: _type = "Task" [ 1752.904614] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.911668] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.413895] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945405, 'name': ReconfigVM_Task, 'duration_secs': 0.272182} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.414290] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 2cb7f89c-1d21-4577-8131-b93961b7ab1d/2cb7f89c-1d21-4577-8131-b93961b7ab1d.vmdk or device None with type sparse {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1753.414953] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb3ff0ac-70a4-4d53-9041-0cb746acb8ad {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.420820] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1753.420820] env[68437]: value = "task-2945406" [ 1753.420820] env[68437]: _type = "Task" [ 1753.420820] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.428955] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945406, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.930901] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945406, 'name': Rename_Task, 'duration_secs': 0.147539} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.931173] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1753.931418] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e25a2a1-f4a4-40f2-ac23-a12e28c7087f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.937024] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1753.937024] env[68437]: value = "task-2945407" [ 1753.937024] env[68437]: _type = "Task" [ 1753.937024] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.944204] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945407, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.446887] env[68437]: DEBUG oslo_vmware.api [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945407, 'name': PowerOnVM_Task, 'duration_secs': 0.437966} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.447384] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1754.447544] env[68437]: INFO nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Took 7.82 seconds to spawn the instance on the hypervisor. [ 1754.447800] env[68437]: DEBUG nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1754.448575] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966881cd-6b04-4651-b479-d0256319fa97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.967628] env[68437]: INFO nova.compute.manager [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Took 12.49 seconds to build instance. 
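The spawn sequence recorded above (copy the cached image VMDK, extend the root disk, reconfigure the VM to attach it, rename, power on) is driven by one recurring oslo.vmware pattern: invoke a vSphere *_Task method through the session, then block in wait_for_task while _poll_task writes the "progress is 0%" / "completed successfully" pairs seen here. Below is a minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session`; the datacenter reference, VMDK path and size are placeholders, not values from this run, and this is not Nova's vm_util code verbatim.

# Illustrative sketch of the invoke-and-poll pattern behind tasks
# task-2945404 .. task-2945407 above. `session` is assumed to be an
# established oslo_vmware.api.VMwareAPISession.
def extend_root_disk(session, datacenter_ref, vmdk_path, new_capacity_kb):
    # The managed object the log shows being invoked: VirtualDiskManager.
    disk_manager = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                              disk_manager,
                              name=vmdk_path,
                              datacenter=datacenter_ref,
                              newCapacityKb=new_capacity_kb,
                              eagerZero=False)
    # wait_for_task drives the polling that _poll_task logs at api.py:434
    # (in progress) and api.py:444 (completed successfully).
    session.wait_for_task(task)

The same invoke/wait pairing underlies the ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above; only the invoked method and its arguments change.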
[ 1755.469487] env[68437]: DEBUG oslo_concurrency.lockutils [None req-0878869b-04b2-49d5-a9a6-7387ab84083a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.002s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1755.818826] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1755.819218] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1755.819498] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1755.819726] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1755.819923] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1755.822032] env[68437]: INFO nova.compute.manager [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Terminating instance [ 1755.907791] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.230660] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.230871] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1756.326032] env[68437]: DEBUG nova.compute.manager [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1756.326270] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1756.327431] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ceb945-08a0-49e3-a6d1-4c633e396f53 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.335775] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1756.336021] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e6efd1f-8f36-49cb-84e4-bf00a9c029df {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.342485] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1756.342485] env[68437]: value = "task-2945408" [ 1756.342485] env[68437]: _type = "Task" [ 1756.342485] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.350544] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.852196] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945408, 'name': PowerOffVM_Task, 'duration_secs': 0.179882} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.852489] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1756.852628] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1756.852872] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-960e2728-5de5-4f78-b0ec-87f6ddeb5e63 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.912339] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1756.912589] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1756.912765] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleting the datastore file [datastore1] 2cb7f89c-1d21-4577-8131-b93961b7ab1d {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1756.913039] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4eabbf84-25fe-4187-9103-337314293f8b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.919820] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1756.919820] env[68437]: value = "task-2945410" [ 1756.919820] env[68437]: _type = "Task" [ 1756.919820] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.926811] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945410, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.226815] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1757.230447] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1757.230649] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1757.429343] env[68437]: DEBUG oslo_vmware.api [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945410, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149327} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.429595] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1757.429760] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1757.429933] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1757.430123] env[68437]: INFO nova.compute.manager [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1757.430371] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1757.430558] env[68437]: DEBUG nova.compute.manager [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1757.430651] env[68437]: DEBUG nova.network.neutron [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}} [ 1757.684542] env[68437]: DEBUG nova.compute.manager [req-76d0f014-678b-4b7d-b001-817889bfdc18 req-9dbbf671-c359-40a5-9170-1e0afce67dfe service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Received event network-vif-deleted-0f5a89ed-2c49-42f1-bc80-ff5d77662264 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1757.684868] env[68437]: INFO nova.compute.manager [req-76d0f014-678b-4b7d-b001-817889bfdc18 req-9dbbf671-c359-40a5-9170-1e0afce67dfe service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Neutron deleted interface 0f5a89ed-2c49-42f1-bc80-ff5d77662264; detaching it from the instance and deleting it from the info cache [ 1757.685089] env[68437]: DEBUG nova.network.neutron [req-76d0f014-678b-4b7d-b001-817889bfdc18 req-9dbbf671-c359-40a5-9170-1e0afce67dfe service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.167599] env[68437]: DEBUG nova.network.neutron [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.187383] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-959dc990-25d2-4be0-8789-52b58d098aeb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.197062] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03de9bb-0e31-4c8b-9918-e77dda5374dc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.220879] env[68437]: DEBUG nova.compute.manager [req-76d0f014-678b-4b7d-b001-817889bfdc18 req-9dbbf671-c359-40a5-9170-1e0afce67dfe service nova] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Detach interface failed, port_id=0f5a89ed-2c49-42f1-bc80-ff5d77662264, reason: Instance 2cb7f89c-1d21-4577-8131-b93961b7ab1d could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1758.670593] env[68437]: INFO nova.compute.manager [-] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Took 1.24 seconds to deallocate network for instance. 
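Throughout the teardown above, oslo.concurrency serializes work per instance: do_terminate_instance runs under a lock named after the instance UUID, and the short "-events" lock guards clearing pending external events. The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets come from lockutils' inner wrapper, which times both phases. A rough sketch of that usage follows, with placeholder bodies; the lock names mirror the ones logged, but the helpers themselves are hypothetical.

from oslo_concurrency import lockutils

INSTANCE_UUID = '2cb7f89c-1d21-4577-8131-b93961b7ab1d'  # from the log above

# Decorator form: the whole critical section runs under the named lock, and
# lockutils logs how long the caller waited and how long the lock was held.
@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    pass  # placeholder body; the real work is the shutdown sequence above

# Context-manager form, as used for the brief "<uuid>-events" section.
def clear_events_for_instance():
    with lockutils.lock(INSTANCE_UUID + '-events'):
        pass  # placeholder body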
[ 1759.177084] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1759.177409] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1759.177628] env[68437]: DEBUG nova.objects.instance [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'resources' on Instance uuid 2cb7f89c-1d21-4577-8131-b93961b7ab1d {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.724438] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6eaad69-839c-4a23-9aa2-12e20cc919f9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.732016] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a545358-2ef1-4232-82f3-97e1f8444379 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.762788] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f35342-9630-448c-ab4c-38e043164862 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.769861] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c13fff-24a2-422f-8ee4-9457bb325c5b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.782606] env[68437]: DEBUG nova.compute.provider_tree [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1760.230812] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.231317] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.285875] env[68437]: DEBUG nova.scheduler.client.report [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based 
on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1760.734623] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1760.790644] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.613s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1760.792858] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.058s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1760.793049] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1760.793212] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1760.794064] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b474eb-ae89-47fb-b9c5-f52e35df4a50 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.801658] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a82ebe-95b2-4912-b518-f1452e6780ab {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.815394] env[68437]: INFO nova.scheduler.client.report [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted allocations for instance 2cb7f89c-1d21-4577-8131-b93961b7ab1d [ 1760.817043] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8af9ab5-6546-4933-a7fe-964424d388e6 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.824890] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88576a5f-99b1-4461-a836-8d02662cfb41 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.854058] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181098MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1760.854207] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1760.854415] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1761.325058] env[68437]: DEBUG oslo_concurrency.lockutils [None req-ecbe1f0f-6358-4ebf-8173-8bea991b1c5c tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "2cb7f89c-1d21-4577-8131-b93961b7ab1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.506s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1761.872623] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 33375d13-870c-449a-8a42-9ad4b1a24f4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1761.872829] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1761.872973] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1761.897065] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee31769-5a01-4a6c-af1b-ab8f13ad2419 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.904745] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee77bb0f-bfa0-4af9-b60c-1f0405d30409 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.934148] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445eee8d-d593-4a9a-ac44-306657d8e855 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.941128] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2924fa-d067-4c3e-9545-533b3c8c90f3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.953838] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.457263] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1762.961775] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1762.961979] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.108s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1763.962702] env[68437]: DEBUG oslo_service.periodic_task [None 
req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.325556] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1771.326020] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1771.326020] env[68437]: INFO nova.compute.manager [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Shelving [ 1772.336050] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1772.336378] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-460b86c9-135e-42e0-8b63-3f62bc0037e2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.344735] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1772.344735] env[68437]: value = "task-2945411" [ 1772.344735] env[68437]: _type = "Task" [ 1772.344735] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.352709] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.854715] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945411, 'name': PowerOffVM_Task, 'duration_secs': 0.16595} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.854978] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1772.855919] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454ccfa4-208d-44e6-8ddb-c6ce9676676f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.874073] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa533d4-21b9-4e5e-849b-5655723a5f02 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.384706] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Creating Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1773.385165] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1c99d719-4c6a-46a6-8c7e-8beee3276dcc {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.393363] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1773.393363] env[68437]: value = "task-2945412" [ 1773.393363] env[68437]: _type = "Task" [ 1773.393363] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.401000] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945412, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.903600] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945412, 'name': CreateSnapshot_Task, 'duration_secs': 0.447484} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.903867] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Created Snapshot of the VM instance {{(pid=68437) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1773.904587] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce04db2d-795c-485f-a2a2-d2945cb37166 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.421334] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Creating linked-clone VM from snapshot {{(pid=68437) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1774.421716] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-141c43a2-265c-460e-a739-9c7f6db18b44 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.430146] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1774.430146] env[68437]: value = "task-2945413" [ 1774.430146] env[68437]: _type = "Task" [ 1774.430146] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.437828] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945413, 'name': CloneVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.940276] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945413, 'name': CloneVM_Task} progress is 94%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.440479] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945413, 'name': CloneVM_Task, 'duration_secs': 0.945365} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.440861] env[68437]: INFO nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Created linked-clone VM from snapshot [ 1775.441510] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d72f0de-29f0-4211-b0a2-9a89a19c57a7 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.448212] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Uploading image daab69ad-ed0c-4903-b23a-7c66ae071141 {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1775.471813] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1775.471813] env[68437]: value = "vm-591196" [ 1775.471813] env[68437]: _type = "VirtualMachine" [ 1775.471813] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1775.472057] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4eb1eae8-8c2a-4a18-ad71-488a2eec7f40 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.478556] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lease: (returnval){ [ 1775.478556] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ebbf1e-8f5d-6559-1114-22e00ffcda2e" [ 1775.478556] env[68437]: _type = "HttpNfcLease" [ 1775.478556] env[68437]: } obtained for exporting VM: (result){ [ 1775.478556] env[68437]: value = "vm-591196" [ 1775.478556] env[68437]: _type = "VirtualMachine" [ 1775.478556] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1775.478847] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the lease: (returnval){ [ 1775.478847] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ebbf1e-8f5d-6559-1114-22e00ffcda2e" [ 1775.478847] env[68437]: _type = "HttpNfcLease" [ 1775.478847] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1775.485183] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1775.485183] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ebbf1e-8f5d-6559-1114-22e00ffcda2e" [ 1775.485183] env[68437]: _type = "HttpNfcLease" [ 1775.485183] env[68437]: } is initializing. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1775.987365] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1775.987365] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ebbf1e-8f5d-6559-1114-22e00ffcda2e" [ 1775.987365] env[68437]: _type = "HttpNfcLease" [ 1775.987365] env[68437]: } is ready. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1775.987682] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1775.987682] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52ebbf1e-8f5d-6559-1114-22e00ffcda2e" [ 1775.987682] env[68437]: _type = "HttpNfcLease" [ 1775.987682] env[68437]: }. {{(pid=68437) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1775.988405] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed7fc6b-0d42-40d7-a07c-3c2cfed7f6cf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.995279] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1775.995448] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk for reading. {{(pid=68437) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1776.083871] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d5939200-dd76-42e5-b3f7-267fc080cf04 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.312711] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1783.313621] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ea0ee9-d3f8-4962-a82b-34239642490a {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.319645] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk is in state: ready. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1783.319835] env[68437]: ERROR oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk due to incomplete transfer. [ 1783.320071] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7e4d9b83-da29-4de2-b261-d82d0aaaa9cb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.327096] env[68437]: DEBUG oslo_vmware.rw_handles [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527d7600-9390-de57-ab68-f3d4515657ff/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1783.327289] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Uploaded image daab69ad-ed0c-4903-b23a-7c66ae071141 to the Glance image server {{(pid=68437) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1783.329582] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Destroying the VM {{(pid=68437) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1783.329794] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6d2c7c28-716b-474a-b426-423dc18c5172 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.335551] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1783.335551] env[68437]: value = "task-2945415" [ 1783.335551] env[68437]: _type = "Task" [ 1783.335551] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.342955] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945415, 'name': Destroy_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.845652] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945415, 'name': Destroy_Task, 'duration_secs': 0.331626} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.845907] env[68437]: INFO nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Destroyed the VM [ 1783.846146] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleting Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1783.846387] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-370b9e6a-2b50-4988-9b2e-82a08c976e51 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.851996] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1783.851996] env[68437]: value = "task-2945416" [ 1783.851996] env[68437]: _type = "Task" [ 1783.851996] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.858816] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945416, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.361554] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945416, 'name': RemoveSnapshot_Task, 'duration_secs': 0.372458} completed successfully. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.361856] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleted Snapshot of the VM instance {{(pid=68437) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1784.362097] env[68437]: DEBUG nova.compute.manager [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1784.362843] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06acd5b-36c5-4188-bb4e-03b426665c58 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.874467] env[68437]: INFO nova.compute.manager [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Shelve offloading [ 1785.377965] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1785.379795] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3045573-624d-4db0-bca7-bacff976d73c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.386602] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1785.386602] env[68437]: value = "task-2945417" [ 1785.386602] env[68437]: _type = "Task" [ 1785.386602] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.393814] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945417, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.897918] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] VM already powered off {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1785.898172] env[68437]: DEBUG nova.compute.manager [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.898911] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513d08f9-d8cd-4480-9770-40013256ff74 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.904190] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.904354] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1785.904520] env[68437]: DEBUG nova.network.neutron [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1786.580360] env[68437]: DEBUG nova.network.neutron [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.083333] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1787.298861] env[68437]: DEBUG nova.compute.manager [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-vif-unplugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1787.299054] env[68437]: DEBUG oslo_concurrency.lockutils [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1787.299343] env[68437]: DEBUG oslo_concurrency.lockutils [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1787.299447] env[68437]: DEBUG oslo_concurrency.lockutils [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1787.299600] env[68437]: DEBUG nova.compute.manager [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] No waiting events found dispatching network-vif-unplugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1787.299775] env[68437]: WARNING nova.compute.manager [req-078eb6a8-674d-481b-98c2-9d67bc6b06f7 req-6eec8890-4230-4b34-b94b-202fb215b5bf service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received unexpected event network-vif-unplugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1787.331198] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1787.332089] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38905f77-27be-4827-9f01-1f9d87282aaf {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.339948] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1787.340148] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bc5dfe2-6476-4bd2-85b1-4567c54b5e55 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.407770] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1787.408057] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1787.408255] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleting the datastore file [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1787.408520] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73c7e2b7-14a0-4895-8e11-179f4011efa9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.415264] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1787.415264] env[68437]: value = "task-2945419" [ 1787.415264] env[68437]: _type = "Task" [ 1787.415264] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.422433] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945419, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.926334] env[68437]: DEBUG oslo_vmware.api [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945419, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134146} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.926713] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1787.926713] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1787.926928] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1787.948814] env[68437]: INFO nova.scheduler.client.report [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted allocations for instance 33375d13-870c-449a-8a42-9ad4b1a24f4b [ 1788.454060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.454060] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1788.454060] env[68437]: DEBUG nova.objects.instance [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'resources' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1788.956906] env[68437]: DEBUG nova.objects.instance [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'numa_topology' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1789.324294] env[68437]: DEBUG nova.compute.manager [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] [instance: 
33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1789.324495] env[68437]: DEBUG nova.compute.manager [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing instance network info cache due to event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1789.324707] env[68437]: DEBUG oslo_concurrency.lockutils [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.324850] env[68437]: DEBUG oslo_concurrency.lockutils [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1789.325018] env[68437]: DEBUG nova.network.neutron [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing network info cache for port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1789.459827] env[68437]: DEBUG nova.objects.base [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Object Instance<33375d13-870c-449a-8a42-9ad4b1a24f4b> lazy-loaded attributes: resources,numa_topology {{(pid=68437) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1789.487187] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d03b0fb-8720-4714-8f6d-f54300df36ff {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.495897] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f42e021-8d24-4d59-9dd4-78324f0f1257 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.525818] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2760acd8-d9ea-4b33-8962-d742ac7a97c5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.532342] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302c1bd8-f9c3-4d85-9f11-6de9c33903a9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.545071] env[68437]: DEBUG nova.compute.provider_tree [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.983611] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 
tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1790.013154] env[68437]: DEBUG nova.network.neutron [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updated VIF entry in instance network info cache for port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0. {{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1790.013515] env[68437]: DEBUG nova.network.neutron [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": null, "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.047693] env[68437]: DEBUG nova.scheduler.client.report [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1790.516482] env[68437]: DEBUG oslo_concurrency.lockutils [req-51bda486-a057-4329-832b-770f29fce694 req-078f9830-b256-4cd7-a7f6-5bedc485c213 service nova] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1790.551918] env[68437]: DEBUG oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1791.060435] env[68437]: DEBUG 
oslo_concurrency.lockutils [None req-799747df-ed28-4221-aa8c-ceb051842b6b tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.734s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1791.061013] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.077s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1791.061013] env[68437]: INFO nova.compute.manager [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Unshelving [ 1792.085355] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1792.085652] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1792.085845] env[68437]: DEBUG nova.objects.instance [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'pci_requests' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.589441] env[68437]: DEBUG nova.objects.instance [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'numa_topology' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1793.092383] env[68437]: INFO nova.compute.claims [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1794.128273] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444545c6-3b21-4255-9e8d-f2aa89fc2c2b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.136089] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f67347b-9efb-48e0-8ed0-13f216dc2d52 {{(pid=68437) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.165901] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb52b91-7427-4b45-8f90-ca9e50572bf3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.173806] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2d117b-0a24-42f3-94b9-15bc80896473 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.188248] env[68437]: DEBUG nova.compute.provider_tree [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.691985] env[68437]: DEBUG nova.scheduler.client.report [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1795.197679] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.112s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1795.224843] env[68437]: INFO nova.network.neutron [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1796.565771] env[68437]: DEBUG nova.compute.manager [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1796.566039] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1796.566171] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1796.566291] env[68437]: DEBUG oslo_concurrency.lockutils [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1796.566485] env[68437]: DEBUG nova.compute.manager [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] No waiting events found dispatching network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1796.566603] env[68437]: WARNING nova.compute.manager [req-d2340035-8194-4771-98a6-441e6d3ef5af req-c3b9d062-6e1a-43c9-8817-b32fe3386bf5 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received unexpected event network-vif-plugged-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 for instance with vm_state shelved_offloaded and task_state spawning. [ 1796.645573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.645573] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1796.645573] env[68437]: DEBUG nova.network.neutron [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1797.320880] env[68437]: DEBUG nova.network.neutron [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.823443] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1797.850227] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-11T18:36:02Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3f9c80ab100f1a0a67cc19bf4d4b1326',container_format='bare',created_at=2025-03-11T18:56:22Z,direct_url=,disk_format='vmdk',id=daab69ad-ed0c-4903-b23a-7c66ae071141,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-185994525-shelved',owner='6d933df479af476ca27b9a4bfe6644c7',properties=ImageMetaProps,protected=,size=31658496,status='active',tags=,updated_at=2025-03-11T18:56:35Z,virtual_size=,visibility=), allow threads: False {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1797.850511] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1797.850661] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image limits 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1797.850846] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Flavor pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1797.850993] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Image pref 0:0:0 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1797.851162] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68437) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1797.851389] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1797.851553] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1797.851721] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Got 1 possible topologies {{(pid=68437) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1797.851882] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1797.852069] env[68437]: DEBUG nova.virt.hardware [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68437) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1797.852954] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a61e09-5b49-43b9-a186-06637ce59754 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.860887] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fbf1fa-15c8-4d56-84fd-03138eedb501 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.873725] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:73:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0', 'vif_model': 'vmxnet3'}] {{(pid=68437) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.880975] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1797.881217] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Creating VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1797.881425] env[68437]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb5b834d-6756-422e-8ae8-7b7d30162fb0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.900394] env[68437]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.900394] env[68437]: value = "task-2945420" [ 1797.900394] env[68437]: _type = "Task" [ 1797.900394] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.912454] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945420, 'name': CreateVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.410561] env[68437]: DEBUG oslo_vmware.api [-] Task: {'id': task-2945420, 'name': CreateVM_Task, 'duration_secs': 0.29894} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.410712] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Created VM on the ESX host {{(pid=68437) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.411348] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.411517] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1798.411889] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1798.412147] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d9c0931-2cde-4b7f-94a3-7e9974941b2d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.416184] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1798.416184] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]520b7e4d-60b2-1083-e1e2-412187e6100b" [ 1798.416184] env[68437]: _type = "Task" [ 1798.416184] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.423081] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]520b7e4d-60b2-1083-e1e2-412187e6100b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.590521] env[68437]: DEBUG nova.compute.manager [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1798.590694] env[68437]: DEBUG nova.compute.manager [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing instance network info cache due to event network-changed-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0. {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1798.590906] env[68437]: DEBUG oslo_concurrency.lockutils [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.591062] env[68437]: DEBUG oslo_concurrency.lockutils [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1798.591229] env[68437]: DEBUG nova.network.neutron [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Refreshing network info cache for port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2065}} [ 1798.926657] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1798.926969] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Processing image daab69ad-ed0c-4903-b23a-7c66ae071141 {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1798.927167] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1798.927327] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1798.927513] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1798.927746] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f30f89a-fbc7-46b5-bfba-4510ec4576db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.938375] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1798.938544] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68437) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1798.939207] env[68437]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9947601-4d45-44c5-9e78-89b2ceb15769 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.943775] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1798.943775] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d97bef-4f4d-9cb4-ac66-cf9bfcb9b21c" [ 1798.943775] env[68437]: _type = "Task" [ 1798.943775] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.950685] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': session[52d18e23-934d-b881-b17b-a9e1dee55268]52d97bef-4f4d-9cb4-ac66-cf9bfcb9b21c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.259091] env[68437]: DEBUG nova.network.neutron [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updated VIF entry in instance network info cache for port 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0. 
{{(pid=68437) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3540}} [ 1799.259478] env[68437]: DEBUG nova.network.neutron [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.453812] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Preparing fetch location {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1799.454053] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Fetch image to [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a/OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a.vmdk {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1799.454228] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Downloading stream optimized image daab69ad-ed0c-4903-b23a-7c66ae071141 to [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a/OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a.vmdk on the data store datastore1 as vApp {{(pid=68437) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1799.454401] env[68437]: DEBUG nova.virt.vmwareapi.images [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Downloading image file data daab69ad-ed0c-4903-b23a-7c66ae071141 to the ESX as VM named 'OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a' {{(pid=68437) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1799.524139] env[68437]: DEBUG 
oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1799.524139] env[68437]: value = "resgroup-9" [ 1799.524139] env[68437]: _type = "ResourcePool" [ 1799.524139] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1799.524399] env[68437]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9e39d65a-b0d6-4f2c-a1d4-687de1e8b9db {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.546466] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lease: (returnval){ [ 1799.546466] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d49b8e-333b-9717-452e-69508bd6aea4" [ 1799.546466] env[68437]: _type = "HttpNfcLease" [ 1799.546466] env[68437]: } obtained for vApp import into resource pool (val){ [ 1799.546466] env[68437]: value = "resgroup-9" [ 1799.546466] env[68437]: _type = "ResourcePool" [ 1799.546466] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1799.546942] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the lease: (returnval){ [ 1799.546942] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d49b8e-333b-9717-452e-69508bd6aea4" [ 1799.546942] env[68437]: _type = "HttpNfcLease" [ 1799.546942] env[68437]: } to be ready. {{(pid=68437) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1799.553124] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1799.553124] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d49b8e-333b-9717-452e-69508bd6aea4" [ 1799.553124] env[68437]: _type = "HttpNfcLease" [ 1799.553124] env[68437]: } is initializing. {{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1799.762884] env[68437]: DEBUG oslo_concurrency.lockutils [req-5caa30d5-0eb8-4fbb-a0ee-9d13c44e7525 req-470317bf-d74e-44c7-83dd-85b7fbb2ae03 service nova] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1800.055085] env[68437]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1800.055085] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d49b8e-333b-9717-452e-69508bd6aea4" [ 1800.055085] env[68437]: _type = "HttpNfcLease" [ 1800.055085] env[68437]: } is ready. 
{{(pid=68437) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1800.055609] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1800.055609] env[68437]: value = "session[52d18e23-934d-b881-b17b-a9e1dee55268]52d49b8e-333b-9717-452e-69508bd6aea4" [ 1800.055609] env[68437]: _type = "HttpNfcLease" [ 1800.055609] env[68437]: }. {{(pid=68437) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1800.056064] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5211c170-14d8-4590-bb8d-ee3605f80742 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.063032] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk from lease info. {{(pid=68437) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1800.063204] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating HTTP connection to write to file with size = 31658496 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk. {{(pid=68437) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1800.124809] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d8be6f51-d35b-4e04-83fc-c1fe07e65cb2 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.159837] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Completed reading data from the image iterator. {{(pid=68437) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1801.160300] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1801.160967] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391776eb-848d-426c-8e70-713e90151f89 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.167395] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk is in state: ready. 
{{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1801.167560] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk. {{(pid=68437) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1801.167756] env[68437]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-410221da-df2d-4051-804c-8c1f720cf3c8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.390052] env[68437]: DEBUG oslo_vmware.rw_handles [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5262211a-a7a3-81ad-7fc9-cb8034e990cc/disk-0.vmdk. {{(pid=68437) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1801.390052] env[68437]: INFO nova.virt.vmwareapi.images [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Downloaded image file data daab69ad-ed0c-4903-b23a-7c66ae071141 [ 1801.390599] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd43353-fe4c-4037-8f25-c26bbaa55c7b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.406893] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bef2a8b5-fe0f-437e-97d2-5fd9bfd37be9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.431579] env[68437]: INFO nova.virt.vmwareapi.images [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] The imported VM was unregistered [ 1801.434093] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Caching image {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1801.434327] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Creating directory with path [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1801.434572] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3592a30-dc5f-4e14-8afb-015e14b50e97 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.444893] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 
tempest-ServersNegativeTestJSON-11266387-project-member] Created directory with path [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141 {{(pid=68437) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1801.445095] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a/OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a.vmdk to [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk. {{(pid=68437) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1801.445334] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6804e124-4c1e-4f0a-93d6-e471f01678e0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.451770] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1801.451770] env[68437]: value = "task-2945423" [ 1801.451770] env[68437]: _type = "Task" [ 1801.451770] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.458984] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.963066] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.463110] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.965318] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.463966] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task} progress is 94%. 
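The move above follows a fixed layout: the freshly imported OSTACK_IMG_* disk is relocated into the shared image cache folder keyed by the Glance image id. A tiny illustrative helper (name and signature are hypothetical) that rebuilds the same datastore path:

def cached_image_path(datastore, image_id,
                      cache_folder="devstack-image-cache_base"):
    # Rebuilds the "[datastore1] devstack-image-cache_base/<id>/<id>.vmdk"
    # destination seen above; helper name and signature are illustrative.
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_folder, image_id, image_id)

assert cached_image_path(
    "datastore1", "daab69ad-ed0c-4903-b23a-7c66ae071141") == (
    "[datastore1] devstack-image-cache_base/"
    "daab69ad-ed0c-4903-b23a-7c66ae071141/"
    "daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk")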
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.965155] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945423, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.136859} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.965454] env[68437]: INFO nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a/OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a.vmdk to [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk. [ 1803.965634] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Cleaning up location [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a {{(pid=68437) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1803.965799] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_6bf2e7bd-156a-4a70-84f3-5ba8e373054a {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1803.966069] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c57b8f1f-ccee-4447-b7ca-f70c9f5fe3eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.971993] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1803.971993] env[68437]: value = "task-2945424" [ 1803.971993] env[68437]: _type = "Task" [ 1803.971993] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.979552] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945424, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.482564] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945424, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086399} completed successfully. 
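Every one of these vCenter operations (MoveVirtualDisk_Task, DeleteDatastoreFile_Task and the tasks that follow) is driven by the same poll-until-done loop visible in the _poll_task entries. A stripped-down sketch of that loop, with a hypothetical get_task_info callable in place of the real Task property read:

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info is a hypothetical callable returning a dict shaped
    # like the task info the log reads back, e.g.
    # {'state': 'queued'|'running'|'success'|'error',
    #  'progress': int, 'result': ..., 'error': str}
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        time.sleep(poll_interval)         # queued/running: keep polling

# Example: a fake task that succeeds on the third poll.
polls = iter([{"state": "queued"},
              {"state": "running", "progress": 50},
              {"state": "success", "result": None}])
assert wait_for_task(lambda: next(polls), poll_interval=0.01) is None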
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.482934] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.483091] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "[datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1804.483464] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk to [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1804.483760] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33d232cb-b658-4e59-b600-1bd958136290 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.489707] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1804.489707] env[68437]: value = "task-2945425" [ 1804.489707] env[68437]: _type = "Task" [ 1804.489707] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.496732] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.000144] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.501699] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.002636] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task} progress is 66%. 
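Taken together, the last few steps are the cache-then-copy pattern of _fetch_image_if_missing: download the image once into devstack-image-cache_base under the image id, then copy the cached disk for the instance, all serialized on a per-image lock. A local-filesystem sketch of that flow (the helper, its lock table and the download callable are all hypothetical; the real code works against the datastore through vCenter tasks):

import os
import shutil
import tempfile
import threading

_cache_locks = {}           # hypothetical per-image locks (in-process only)

def fetch_image_if_missing(image_id, cache_dir, download, instance_path):
    # download is a hypothetical callable that writes the image to the
    # path it is given.
    cached = os.path.join(cache_dir, image_id, "%s.vmdk" % image_id)
    lock = _cache_locks.setdefault(image_id, threading.Lock())
    with lock:                                   # one downloader per image
        if not os.path.exists(cached):
            tmp = tempfile.mkdtemp(prefix="OSTACK_IMG_", dir=cache_dir)
            download(os.path.join(tmp, "disk.vmdk"))
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            shutil.move(os.path.join(tmp, "disk.vmdk"), cached)
            shutil.rmtree(tmp, ignore_errors=True)    # drop the temp folder
    shutil.copyfile(cached, instance_path)            # per-instance copy

# Example against a temporary directory standing in for the datastore.
root = tempfile.mkdtemp()
fetch_image_if_missing("daab69ad-ed0c-4903-b23a-7c66ae071141", root,
                       lambda path: open(path, "wb").write(b"vmdk"),
                       os.path.join(root, "instance.vmdk"))
assert open(os.path.join(root, "instance.vmdk"), "rb").read() == b"vmdk"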
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.503745] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.006837] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945425, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.18822} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.007164] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/daab69ad-ed0c-4903-b23a-7c66ae071141/daab69ad-ed0c-4903-b23a-7c66ae071141.vmdk to [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk {{(pid=68437) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1807.008280] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49829eb-9541-4f6d-96cb-70ec7ea844eb {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.040619] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.040948] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a87349f-2e28-4c5a-96e0-61065fff1a15 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.060352] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1807.060352] env[68437]: value = "task-2945426" [ 1807.060352] env[68437]: _type = "Task" [ 1807.060352] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.067471] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945426, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.569844] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945426, 'name': ReconfigVM_Task, 'duration_secs': 0.286179} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.570179] env[68437]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b/33375d13-870c-449a-8a42-9ad4b1a24f4b.vmdk or device None with type streamOptimized {{(pid=68437) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1807.570786] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a902e2d7-ec7b-41b0-a756-0813469b06e3 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.577274] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1807.577274] env[68437]: value = "task-2945427" [ 1807.577274] env[68437]: _type = "Task" [ 1807.577274] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.585749] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945427, 'name': Rename_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.087406] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945427, 'name': Rename_Task, 'duration_secs': 0.135276} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.087704] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powering on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1808.087939] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6684a6cb-97f2-4610-aeb9-bcbc00c8d3d8 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.094188] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1808.094188] env[68437]: value = "task-2945428" [ 1808.094188] env[68437]: _type = "Task" [ 1808.094188] env[68437]: } to complete. 
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.101317] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.604471] env[68437]: DEBUG oslo_vmware.api [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945428, 'name': PowerOnVM_Task, 'duration_secs': 0.416264} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.604744] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powered on the VM {{(pid=68437) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1808.698080] env[68437]: DEBUG nova.compute.manager [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1808.698971] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a15bb885-62f8-47c8-bd2f-474769f8e4b9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.217245] env[68437]: DEBUG oslo_concurrency.lockutils [None req-bd3ef0b9-b8d3-4b53-87e6-29f0dc6f9b49 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.155s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1810.902110] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b2d3de-3263-46c2-a34a-820b6ac28684 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.909277] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Suspending the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1810.909518] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ba879bcc-9c05-4050-83dd-53e3b6419733 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.915492] env[68437]: DEBUG oslo_vmware.api [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1810.915492] env[68437]: value = "task-2945429" [ 1810.915492] env[68437]: _type = "Task" [ 1810.915492] env[68437]: } to complete. 
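The "Checking state" entries boil down to reading the VM's runtime.powerState and mapping it onto Nova's power-state constants. A rough sketch of that mapping, using illustrative string constants rather than Nova's real integer values:

# Illustrative constants only; nova.compute.power_state defines its own
# integer values, which are not reproduced here.
RUNNING, SHUTDOWN, SUSPENDED, NOSTATE = "running", "shutdown", "suspended", "nostate"

_VMWARE_POWER_MAP = {
    "poweredOn": RUNNING,        # vSphere VirtualMachinePowerState values
    "poweredOff": SHUTDOWN,
    "suspended": SUSPENDED,
}

def get_power_state(read_runtime_power_state):
    # read_runtime_power_state is a hypothetical callable standing in for
    # the PropertyCollector read of runtime.powerState seen above.
    return _VMWARE_POWER_MAP.get(read_runtime_power_state(), NOSTATE)

assert get_power_state(lambda: "poweredOn") == RUNNING
assert get_power_state(lambda: "suspended") == SUSPENDED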
{{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.923776] env[68437]: DEBUG oslo_vmware.api [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945429, 'name': SuspendVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.426374] env[68437]: DEBUG oslo_vmware.api [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945429, 'name': SuspendVM_Task} progress is 100%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.926309] env[68437]: DEBUG oslo_vmware.api [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945429, 'name': SuspendVM_Task, 'duration_secs': 0.541526} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.926768] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Suspended the VM {{(pid=68437) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1811.926768] env[68437]: DEBUG nova.compute.manager [None req-07080e24-14d9-4cf2-81fd-ac540c6293a7 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1811.927484] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e87891d-d92d-4dd0-b20a-4242f420f1a0 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.230649] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.273493] env[68437]: INFO nova.compute.manager [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Resuming [ 1813.274176] env[68437]: DEBUG nova.objects.instance [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'flavor' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1814.785914] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.786321] env[68437]: DEBUG oslo_concurrency.lockutils [None 
req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquired lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1814.786321] env[68437]: DEBUG nova.network.neutron [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Building network info cache for instance {{(pid=68437) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2068}} [ 1815.463831] env[68437]: DEBUG nova.network.neutron [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [{"id": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "address": "fa:16:3e:b3:73:6e", "network": {"id": "8e0dcc68-2815-467d-b279-55fb352ed3ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-275279641-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d933df479af476ca27b9a4bfe6644c7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ae60b40-e7", "ovs_interfaceid": "2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.966729] env[68437]: DEBUG oslo_concurrency.lockutils [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Releasing lock "refresh_cache-33375d13-870c-449a-8a42-9ad4b1a24f4b" {{(pid=68437) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1815.967822] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89ee0c8-43e6-4b55-ad66-e47497a2c604 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.974552] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Resuming the VM {{(pid=68437) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1815.974769] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1994acf1-2ab1-4103-99f9-4bbbb4617ca5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.982289] env[68437]: DEBUG 
oslo_vmware.api [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1815.982289] env[68437]: value = "task-2945430" [ 1815.982289] env[68437]: _type = "Task" [ 1815.982289] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.989443] env[68437]: DEBUG oslo_vmware.api [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945430, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.492687] env[68437]: DEBUG oslo_vmware.api [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945430, 'name': PowerOnVM_Task, 'duration_secs': 0.496514} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.493010] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Resumed the VM {{(pid=68437) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1816.493272] env[68437]: DEBUG nova.compute.manager [None req-7e2628df-72f8-4f6a-80c2-14fb4edb4375 tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Checking state {{(pid=68437) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1816.494126] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e0ab98-7862-42de-bf71-bdcf9b0eaf1f {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.230990] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.733460] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.733795] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.733850] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] CONF.reclaim_instance_interval <= 0, skipping... 
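The periodic-task entries here and below follow one rule: a task whose configured interval is zero or negative is treated as disabled, which is why _reclaim_queued_deletes reports "CONF.reclaim_instance_interval <= 0, skipping". A toy scheduler illustrating that decision (not oslo.service's actual implementation):

import time

def run_periodic_tasks(tasks, now=time.monotonic):
    # tasks maps a task name to (interval_seconds, callable, last_run).
    # An interval <= 0 means "disabled"; this is only a sketch, the real
    # oslo.service scheduler is more involved.
    for name, (interval, func, last_run) in tasks.items():
        if interval <= 0:
            continue                             # disabled task: skip it
        if now() - last_run >= interval:
            func()
            tasks[name] = (interval, func, now())

# Example: _reclaim_queued_deletes disabled, _poll_volume_usage due to run.
ran = []
tasks = {"_reclaim_queued_deletes": (0, lambda: ran.append("reclaim"), 0.0),
         "_poll_volume_usage": (60, lambda: ran.append("poll"), 0.0)}
run_periodic_tasks(tasks, now=lambda: 120.0)
assert ran == ["poll"]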
{{(pid=68437) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1819.226645] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1819.226913] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.382405] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.382405] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.382405] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.884514] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1820.884514] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1820.884717] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1820.884717] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68437) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1820.885572] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa22eec7-9fd1-46da-9b03-88e4f1e85930 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.893286] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9808ecf6-9d12-43ac-a1d8-18c039a5b120 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.906600] env[68437]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b14a9c-2168-4658-bc95-a640726ce7ca {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.912337] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d8ddb5-5d92-490c-9b8d-735676b11265 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.940979] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181106MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=68437) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1820.941149] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1820.941316] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.304760] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1821.305055] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.305241] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1821.305426] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.305597] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 
tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1821.307735] env[68437]: INFO nova.compute.manager [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Terminating instance [ 1821.811559] env[68437]: DEBUG nova.compute.manager [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Start destroying the instance on the hypervisor. {{(pid=68437) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1821.811936] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Destroying instance {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1821.813099] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50adada0-c504-4c88-a17a-332e49912266 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.820641] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powering off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1821.820881] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-833b391e-e145-476a-a82a-9b6dcca1a04d {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.826285] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1821.826285] env[68437]: value = "task-2945431" [ 1821.826285] env[68437]: _type = "Task" [ 1821.826285] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.833587] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945431, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.043745] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Instance 33375d13-870c-449a-8a42-9ad4b1a24f4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
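The lockutils entries report how long each lock was waited for and how long it was held (0.000s for the per-instance and events locks here, several seconds elsewhere). A small context-manager sketch of that bookkeeping, using a plain threading.Lock instead of oslo.concurrency's machinery:

import contextlib
import threading
import time

@contextlib.contextmanager
def timed_lock(lock, name, log=print):
    # Sketch of the "waited ... / held ..." accounting visible in the
    # lockutils entries above; oslo.concurrency does this internally.
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    log('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        lock.release()
        log('Lock "%s" released :: held %.3fs'
            % (name, time.monotonic() - acquired))

# Example mirroring the per-instance lock taken for terminate_instance.
with timed_lock(threading.Lock(), "33375d13-870c-449a-8a42-9ad4b1a24f4b"):
    pass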
{{(pid=68437) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1822.043966] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1822.044131] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68437) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1822.069237] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f45e41-0b91-4840-bcf6-f17709354f6c {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.076813] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b96bebf-f255-41ae-8671-05437d860f23 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.105087] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e2193f-b928-4ddf-9eed-580504707e62 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.111503] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c07f64-7d53-49c1-8a0c-91a90a9d7b20 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.123685] env[68437]: DEBUG nova.compute.provider_tree [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.335481] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945431, 'name': PowerOffVM_Task, 'duration_secs': 0.19789} completed successfully. 
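The "Final resource view" line can be reproduced from two inputs: the reserved host memory and the per-instance allocation recorded just above ({'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}). A sketch of that tally with illustrative parameter names; it yields the same 704MB / 1GB / 1-vCPU usage figures:

def report_final_resource_view(phys_ram_mb, phys_disk_gb, total_vcpus,
                               reserved_ram_mb, instances):
    # Parameter names are illustrative, not the resource tracker's real
    # fields; instances carries the per-instance placement allocations.
    used_ram = reserved_ram_mb + sum(i["MEMORY_MB"] for i in instances)
    used_disk = sum(i["DISK_GB"] for i in instances)
    used_vcpus = sum(i["VCPU"] for i in instances)
    return ("phys_ram=%dMB used_ram=%dMB phys_disk=%dGB used_disk=%dGB "
            "total_vcpus=%d used_vcpus=%d"
            % (phys_ram_mb, used_ram, phys_disk_gb, used_disk,
               total_vcpus, used_vcpus))

print(report_final_resource_view(196590, 200, 48, 512,
                                 [{"MEMORY_MB": 192, "DISK_GB": 1, "VCPU": 1}]))
# phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB
# total_vcpus=48 used_vcpus=1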
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.335745] env[68437]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Powered off the VM {{(pid=68437) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1822.335926] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Unregistering the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1822.336215] env[68437]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd0e7138-76cb-49d5-ae8f-7f369f560ca9 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.400364] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Unregistered the VM {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1822.400572] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleting contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1822.400755] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleting the datastore file [datastore1] 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1822.401011] env[68437]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47f680fa-4866-4da7-a190-ed02926317b5 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.406357] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for the task: (returnval){ [ 1822.406357] env[68437]: value = "task-2945433" [ 1822.406357] env[68437]: _type = "Task" [ 1822.406357] env[68437]: } to complete. {{(pid=68437) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.415010] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945433, 'name': DeleteDatastoreFile_Task} progress is 0%. 
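The destroy path above is a short fixed sequence: power the VM off, unregister it from the vCenter inventory (a synchronous call, so no task is waited on), then delete its datastore directory. Expressed with hypothetical callables and a wait_for_task-style helper like the earlier sketch:

def destroy_instance(power_off, unregister, delete_datastore_dir, wait):
    # power_off and delete_datastore_dir are hypothetical callables that
    # start vCenter tasks (PowerOffVM_Task, DeleteDatastoreFile_Task);
    # unregister stands in for the synchronous UnregisterVM call.
    wait(power_off())                  # stop the guest first
    unregister()                       # drop the VM from the inventory
    wait(delete_datastore_dir())       # then remove its files on disk

# Example with no-op callables.
destroy_instance(lambda: "task-A", lambda: None, lambda: "task-B",
                 wait=lambda task: task)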
{{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.627532] env[68437]: DEBUG nova.scheduler.client.report [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1822.915302] env[68437]: DEBUG oslo_vmware.api [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Task: {'id': task-2945433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140399} completed successfully. {{(pid=68437) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.915689] env[68437]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted the datastore file {{(pid=68437) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1822.915738] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deleted contents of the VM from datastore datastore1 {{(pid=68437) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1822.915898] env[68437]: DEBUG nova.virt.vmwareapi.vmops [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance destroyed {{(pid=68437) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1822.916098] env[68437]: INFO nova.compute.manager [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1822.916363] env[68437]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
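The inventory payload reported here is what placement uses to size the provider; usable capacity per resource class follows the (total - reserved) * allocation_ratio rule, which for these numbers gives 192 VCPU, 196078 MB of RAM and 400 GB of disk:

def placement_capacity(inventory):
    # Usable capacity per resource class from the inventory shown above,
    # using the (total - reserved) * allocation_ratio rule.
    return {rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
            for rc, inv in inventory.items()}

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
assert placement_capacity(inventory) == {
    "VCPU": 192, "MEMORY_MB": 196078, "DISK_GB": 400}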
{{(pid=68437) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}}
[ 1822.916556] env[68437]: DEBUG nova.compute.manager [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Deallocating network for instance {{(pid=68437) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1822.916653] env[68437]: DEBUG nova.network.neutron [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] deallocate_for_instance() {{(pid=68437) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1861}}
[ 1823.132848] env[68437]: DEBUG nova.compute.resource_tracker [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68437) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1823.133099] env[68437]: DEBUG oslo_concurrency.lockutils [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.192s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1823.133342] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1823.133477] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances with incomplete migration {{(pid=68437) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}}
[ 1823.173870] env[68437]: DEBUG nova.compute.manager [req-20d76b83-d943-46fa-9f4a-5c3565b92220 req-6abfe57b-013a-47b1-a24c-0b78d2a289ea service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Received event network-vif-deleted-2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0 {{(pid=68437) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}}
[ 1823.173927] env[68437]: INFO nova.compute.manager [req-20d76b83-d943-46fa-9f4a-5c3565b92220 req-6abfe57b-013a-47b1-a24c-0b78d2a289ea service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Neutron deleted interface 2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0; detaching it from the instance and deleting it from the info cache
[ 1823.174066] env[68437]: DEBUG nova.network.neutron [req-20d76b83-d943-46fa-9f4a-5c3565b92220 req-6abfe57b-013a-47b1-a24c-0b78d2a289ea service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1823.658855] env[68437]: DEBUG nova.network.neutron [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Updating instance_info_cache with network_info: [] {{(pid=68437) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1823.676835] env[68437]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7169176-96a7-46b9-900a-023670bc5df1 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.686761] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b217083d-c27b-462e-8890-7e7f4c181b95 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1823.710423] env[68437]: DEBUG nova.compute.manager [req-20d76b83-d943-46fa-9f4a-5c3565b92220 req-6abfe57b-013a-47b1-a24c-0b78d2a289ea service nova] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Detach interface failed, port_id=2ae60b40-e7ac-4c0a-bd1d-a93fab7192d0, reason: Instance 33375d13-870c-449a-8a42-9ad4b1a24f4b could not be found. {{(pid=68437) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}}
[ 1824.161598] env[68437]: INFO nova.compute.manager [-] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Took 1.24 seconds to deallocate network for instance.
[ 1824.668691] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1824.668970] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1824.669235] env[68437]: DEBUG nova.objects.instance [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lazy-loading 'resources' on Instance uuid 33375d13-870c-449a-8a42-9ad4b1a24f4b {{(pid=68437) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1825.202717] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb66971a-7af4-4621-aca1-7f56a355fe84 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1825.209706] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03485ddc-1e94-45f2-a0a5-882712f9c97b {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1825.240056] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8434e834-f49f-45a3-bee1-8aba33eb3702 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1825.247819] env[68437]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4134156-aca9-4a03-bed6-ee9f5c7fbf31 {{(pid=68437) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1825.260997] env[68437]: DEBUG nova.compute.provider_tree [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed in ProviderTree for provider: 422e986f-b38b-46ad-94b3-91f3ccd10a05 {{(pid=68437) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1825.763891] env[68437]: DEBUG nova.scheduler.client.report [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Inventory has not changed for provider 422e986f-b38b-46ad-94b3-91f3ccd10a05 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68437) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
[ 1826.268786] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1826.285960] env[68437]: INFO nova.scheduler.client.report [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Deleted allocations for instance 33375d13-870c-449a-8a42-9ad4b1a24f4b
[ 1826.486655] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1826.486898] env[68437]: DEBUG oslo_service.periodic_task [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68437) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1826.487070] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] Cleaning up deleted instances {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}}
[ 1826.794450] env[68437]: DEBUG oslo_concurrency.lockutils [None req-b6e7ba93-b384-48b4-93a4-5116a4e6cf5a tempest-ServersNegativeTestJSON-11266387 tempest-ServersNegativeTestJSON-11266387-project-member] Lock "33375d13-870c-449a-8a42-9ad4b1a24f4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.489s {{(pid=68437) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1826.991528] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] There are 7 instances to clean {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}}
[ 1826.991916] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 2cb7f89c-1d21-4577-8131-b93961b7ab1d] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1827.494659] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 33375d13-870c-449a-8a42-9ad4b1a24f4b] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1827.998494] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 47ba3b7e-23dd-4967-9850-b99c1dca219e] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1828.504099] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: a5ce6701-f5d2-4eb7-9d6c-3ace121de308] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1829.007947] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 0f078ed3-d253-4bc4-901c-3c84027392b4] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1829.510059] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 95b8784f-89e4-4ca3-b852-db9417e5b8b8] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}
[ 1830.013425] env[68437]: DEBUG nova.compute.manager [None req-110d170f-68fe-4aba-8cf9-9501e4c4efe4 None None] [instance: 8b80927c-1cda-4652-8c2e-df39c93bae78] Instance has had 0 of 5 cleanup attempts {{(pid=68437) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}}